Merge commit '35ef5cbe54a18fc177ab2593001d76ab1412c382'
This commit is contained in: commit 09e5bf3182
@@ -168,7 +168,7 @@ def feelParseISODuration(input):
     """
     if input[0] != 'P':
-        raise Exception("Oh Crap!")
+        raise Exception("ISO Duration format must begin with the letter P")
     input = input[1:]
     days, time = input.split("T")
     lookups = [("Y",days,timedelta(days=365)),
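The parser touched above treats an ISO 8601 duration as a "P"-prefixed string whose "T" separates date units from time units. A minimal standalone sketch of that approach (not the library's implementation; the day-based approximations mirror the lookups visible in the hunk):

    # Sketch: parse "P3DT4H30M"-style ISO 8601 durations into a timedelta.
    from datetime import timedelta
    import re

    def parse_iso_duration(text):
        if text[0] != 'P':
            raise Exception("ISO Duration format must begin with the letter P")
        date_part, _, time_part = text[1:].partition('T')
        date_units = {'Y': 365, 'M': 30, 'D': 1}   # approximate days, as above
        time_units = {'H': 3600, 'M': 60, 'S': 1}  # seconds
        total = timedelta()
        for value, unit in re.findall(r'(\d+)([YMD])', date_part):
            total += timedelta(days=int(value) * date_units[unit])
        for value, unit in re.findall(r'(\d+)([HMS])', time_part):
            total += timedelta(seconds=int(value) * time_units[unit])
        return total

    assert parse_iso_duration("P1DT12H") == timedelta(days=1, hours=12)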
@@ -239,7 +239,7 @@ fixes = [(r'string\s+length\((.+?)\)','len(\\1)'),
          ('true','True'),
          ('false','False')
          ]


 externalFuncs = {
     'feelConvertTime':feelConvertTime,
     'FeelInterval':FeelInterval,
@@ -4,7 +4,7 @@ import copy
 import sys
 import traceback

-from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
+from ..exceptions import SpiffWorkflowException, WorkflowTaskException
 from ..operators import Operator


@@ -118,10 +118,11 @@ class PythonScriptEngine(object):
                 return expression._matches(task)
             else:
                 return self._evaluate(expression, task.data, external_methods)
+        except SpiffWorkflowException as se:
+            se.add_note(f"Error evaluating expression '{expression}'")
+            raise se
         except Exception as e:
-            raise WorkflowTaskExecException(task,
-                                            f"Error evaluating expression {expression}",
-                                            e)
+            raise WorkflowTaskException(f"Error evaluating expression '{expression}'", task=task, exception=e)

     def execute(self, task, script, external_methods=None):
         """
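The new branch above annotates a SpiffWorkflowException with add_note and re-raises it rather than wrapping it. A minimal stand-in class showing that note-accumulation pattern (SpiffWorkflowException's own add_note is assumed to work along these lines):

    # Sketch: context accumulates on the exception as it bubbles up.
    class NotedError(Exception):
        def __init__(self, msg):
            super().__init__(msg)
            self.notes = []

        def add_note(self, note):
            self.notes.append(note)

    try:
        try:
            raise NotedError("division by zero")
        except NotedError as se:
            se.add_note("Error evaluating expression 'x / y'")
            raise se
    except NotedError as err:
        print(err, '|', '; '.join(err.notes))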
@@ -141,25 +142,33 @@ class PythonScriptEngine(object):
         raise NotImplementedError("To call external services override the script engine and implement `call_service`.")

     def create_task_exec_exception(self, task, script, err):
-        if isinstance(err, WorkflowTaskExecException):
+        line_number, error_line = self.get_error_line_number_and_content(script, err)
+        if isinstance(err, SpiffWorkflowException):
+            err.line_number = line_number
+            err.error_line = error_line
+            err.add_note(f"Python script error on line {line_number}: '{error_line}'")
             return err

         detail = err.__class__.__name__
         if len(err.args) > 0:
             detail += ":" + err.args[0]
+        return WorkflowTaskException(detail, task=task, exception=err, line_number=line_number, error_line=error_line)
+
+    def get_error_line_number_and_content(self, script, err):
         line_number = 0
         error_line = ''
-        cl, exc, tb = sys.exc_info()
-        # Loop back through the stack trace to find the file called
-        # 'string' - which is the script we are executing, then use that
-        # to parse and pull out the offending line.
-        for frame_summary in traceback.extract_tb(tb):
-            if frame_summary.filename == '<string>':
-                line_number = frame_summary.lineno
-                error_line = script.splitlines()[line_number - 1]
-        return WorkflowTaskExecException(task, detail, err, line_number,
-                                         error_line)
+        if isinstance(err, SyntaxError):
+            line_number = err.lineno
+        else:
+            cl, exc, tb = sys.exc_info()
+            # Loop back through the stack trace to find the file called
+            # 'string' - which is the script we are executing, then use that
+            # to parse and pull out the offending line.
+            for frame_summary in traceback.extract_tb(tb):
+                if frame_summary.filename == '<string>':
+                    line_number = frame_summary.lineno
+        if line_number > 0:
+            error_line = script.splitlines()[line_number - 1]
+        return line_number, error_line

     def check_for_overwrite(self, task, external_methods):
         """It's possible that someone will define a variable with the
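get_error_line_number_and_content relies on the fact that code run through exec() reports its frames under the filename '<string>', so the frame's lineno indexes directly into the script source. A standalone sketch of that lookup:

    # Sketch: find the failing line of an exec()'d script from its traceback.
    import sys
    import traceback

    script = "x = 1\ny = x + unknown_name\n"
    try:
        exec(compile(script, '<string>', 'exec'), {})
    except Exception:
        _, _, tb = sys.exc_info()
        line_number = 0
        for frame_summary in traceback.extract_tb(tb):
            if frame_summary.filename == '<string>':
                line_number = frame_summary.lineno
        print(line_number, repr(script.splitlines()[line_number - 1]))
        # -> 2 'y = x + unknown_name'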
@@ -172,7 +181,7 @@ class PythonScriptEngine(object):
         msg = f"You have task data that overwrites a predefined " \
               f"function(s). Please change the following variable or " \
               f"field name(s) to something else: {func_overwrites}"
-        raise WorkflowTaskExecException(task, msg)
+        raise WorkflowTaskException(msg, task=task)

     def convert_to_box(self, data):
         if isinstance(data, dict):
@@ -1,50 +1,4 @@
-import re
-
-from SpiffWorkflow.exceptions import WorkflowException, WorkflowTaskException
-from SpiffWorkflow.util import levenshtein
-
-class WorkflowTaskExecException(WorkflowTaskException):
-    """
-    Exception during execution of task "payload". For example:
-
-    * ScriptTask during execution of embedded script,
-    * ServiceTask during external service call.
-    """
-
-    def __init__(self, task, error_msg, exception=None, line_number=0, error_line=""):
-        """
-        Exception initialization.
-
-        :param task: the task that threw the exception
-        :type task: Task
-        :param exception: a human readable error message
-        :type exception: Exception
-
-        """
-
-        self.offset = 0
-        self.line_number = line_number
-        self.error_line = error_line
-
-        if isinstance(exception, SyntaxError):
-            # Prefer line number from syntax error if available.
-            self.line_number = exception.lineno
-            self.offset = exception.offset
-        elif isinstance(exception, NameError):
-            def_match = re.match("name '(.+)' is not defined", str(exception))
-            if def_match:
-                bad_variable = re.match("name '(.+)' is not defined", str(exception)).group(1)
-                most_similar = levenshtein.most_similar(bad_variable, task.data.keys(), 3)
-                error_msg = f'something you are referencing does not exist: ' \
-                            f'"{exception}".'
-                if len(most_similar) == 1:
-                    error_msg += f' Did you mean \'{most_similar[0]}\'?'
-                if len(most_similar) > 1:
-                    error_msg += f' Did you mean one of \'{most_similar}\'?'
-
-        else:
-            error_msg = str(exception)
-        super().__init__(task, error_msg, exception)
+from SpiffWorkflow.exceptions import WorkflowException


 class WorkflowDataException(WorkflowException):
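The removed NameError handling built a "Did you mean ...?" hint from the undefined name. The same idea with only the standard library, using difflib as a stand-in for the project's levenshtein util:

    # Sketch: suggest near-miss names when a script references an unknown variable.
    import difflib
    import re

    def suggest(exception, known_names):
        match = re.match(r"name '(.+)' is not defined", str(exception))
        if not match:
            return str(exception)
        close = difflib.get_close_matches(match.group(1), known_names, n=3)
        msg = f'something you are referencing does not exist: "{exception}".'
        if len(close) == 1:
            msg += f" Did you mean '{close[0]}'?"
        elif close:
            msg += f" Did you mean one of {close}?"
        return msg

    try:
        exec("total = amout + 1", {"amount": 2, "rate": 3})
    except NameError as e:
        print(suggest(e, ["amount", "rate"]))
        # -> ... Did you mean 'amount'?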
@@ -21,7 +21,7 @@ import glob
 import os

 from lxml import etree
-from lxml.etree import DocumentInvalid
+from lxml.etree import DocumentInvalid, LxmlError

 from SpiffWorkflow.bpmn.specs.events.event_definitions import NoneEventDefinition
@@ -72,8 +72,13 @@ class BpmnValidator:
     def validate(self, bpmn, filename=None):
         try:
             self.validator.assertValid(bpmn)
-        except DocumentInvalid as di:
-            raise DocumentInvalid(str(di) + "file: " + filename)
+        except ValidationException as ve:
+            ve.file_name = filename
+            ve.line_number = self.validator.error_log.last_error.line
+        except LxmlError as le:
+            last_error = self.validator.error_log.last_error
+            raise ValidationException(last_error.message, file_name=filename,
+                                      line_number=last_error.line)

 class BpmnParser(object):
     """
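The validator above builds on lxml: XMLSchema.assertValid raises DocumentInvalid, and the schema's error_log records the failing line, which is what the new ValidationException now carries. A self-contained sketch of that underlying pattern (toy schema, not the BPMN one):

    # Sketch: lxml schema validation with line numbers from the error log.
    from io import BytesIO
    from lxml import etree

    schema_doc = etree.parse(BytesIO(b"""
    <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
      <xs:element name="definitions"/>
    </xs:schema>"""))
    validator = etree.XMLSchema(schema_doc)

    bad = etree.parse(BytesIO(b"<notDefinitions/>"))
    try:
        validator.assertValid(bad)
    except etree.DocumentInvalid:
        last = validator.error_log.last_error
        print(last.line, last.message)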
@@ -211,8 +216,7 @@ class BpmnParser(object):
             correlation_identifier = correlation.attrib.get("id")
             if correlation_identifier is None:
                 raise ValidationException(
-                    "Correlation identifier is missing from bpmn xml"
-                )
+                    "Correlation identifier is missing from bpmn xml" )
             correlation_property_retrieval_expressions = correlation.xpath(
                 "//bpmn:correlationPropertyRetrievalExpression", namespaces = self.namespaces)
             if not correlation_property_retrieval_expressions:
@@ -243,9 +247,9 @@ class BpmnParser(object):
     def create_parser(self, node, filename=None, lane=None):
         parser = self.PROCESS_PARSER_CLASS(self, node, self.namespaces, filename=filename, lane=lane)
         if parser.get_id() in self.process_parsers:
-            raise ValidationException('Duplicate process ID', node=node, filename=filename)
+            raise ValidationException('Duplicate process ID', node=node, file_name=filename)
         if parser.get_name() in self.process_parsers_by_name:
-            raise ValidationException('Duplicate process name', node=node, filename=filename)
+            raise ValidationException('Duplicate process name', node=node, file_name=filename)
         self.process_parsers[parser.get_id()] = parser
         self.process_parsers_by_name[parser.get_name()] = parser
@@ -93,7 +93,7 @@ class ProcessParser(NodeParser):
         (node_parser, spec_class) = self.parser._get_parser_class(node.tag)
         if not node_parser or not spec_class:
             raise ValidationException("There is no support implemented for this task type.",
-                                      node=node, filename=self.filename)
+                                      node=node, file_name=self.filename)
         np = node_parser(self, spec_class, node, lane=self.lane)
         task_spec = np.parse_node()
         return task_spec
@@ -103,7 +103,7 @@ class ProcessParser(NodeParser):
         # bpmn:startEvent if we have a subworkflow task
         start_node_list = self.xpath('./bpmn:startEvent')
         if not start_node_list and self.process_executable:
-            raise ValidationException("No start event found", node=self.node, filename=self.filename)
+            raise ValidationException("No start event found", node=self.node, file_name=self.filename)
         self.spec = BpmnProcessSpec(name=self.get_id(), description=self.get_name(), filename=self.filename)

         # Check for an IO Specification.
@@ -72,7 +72,7 @@ class TaskParser(NodeParser):
             raise ValidationException(
                 f'Unsupported MultiInstance Task: {self.task.__class__}',
                 node=self.node,
-                filename=self.filename)
+                file_name=self.filename)

         self.task.loopTask = loop_task
         self.task.isSequential = is_sequential
@@ -127,17 +127,15 @@ class TaskParser(NodeParser):
             self.spec, '%s.BoundaryEventParent' % self.get_id(),
             self.task, lane=self.task.lane)
         self.process_parser.parsed_nodes[self.node.get('id')] = parent
-        parent.connect_outgoing(self.task, '%s.FromBoundaryEventParent' % self.get_id(), None, None)
+        parent.connect(self.task)
         for event in children:
             child = self.process_parser.parse_node(event)
             if isinstance(child.event_definition, CancelEventDefinition) \
                     and not isinstance(self.task, TransactionSubprocess):
                 raise ValidationException('Cancel Events may only be used with transactions',
-                                          node=self.node,
-                                          filename=self.filename)
-            parent.connect_outgoing(child,
-                                    '%s.FromBoundaryEventParent' % event.get('id'),
-                                    None, None)
+                                          node=self.node,
+                                          file_name=self.filename)
+            parent.connect(child)
         return parent

     def parse_node(self):
@@ -169,7 +167,7 @@ class TaskParser(NodeParser):
                     'Multiple outgoing flows are not supported for '
                     'tasks of type',
                     node=self.node,
-                    filename=self.filename)
+                    file_name=self.filename)
             for sequence_flow in outgoing:
                 target_ref = sequence_flow.get('targetRef')
                 try:
@@ -179,7 +177,7 @@ class TaskParser(NodeParser):
                         'When looking for a task spec, we found two items, '
                         'perhaps a form has the same ID? (%s)' % target_ref,
                         node=self.node,
-                        filename=self.filename)
+                        file_name=self.filename)

                 c = self.process_parser.parse_node(target_node)
                 position = c.position
@@ -196,18 +194,13 @@ class TaskParser(NodeParser):
                     default_outgoing = sequence_flow.get('id')

             for (position, c, target_node, sequence_flow) in children:
-                self.connect_outgoing(
-                    c, target_node, sequence_flow,
-                    sequence_flow.get('id') == default_outgoing)
+                self.connect_outgoing(c, sequence_flow, sequence_flow.get('id') == default_outgoing)

             return parent if boundary_event_nodes else self.task
-        except ValidationException:
-            raise
+        except ValidationException as ve:
+            raise ve
         except Exception as ex:
-            exc_info = sys.exc_info()
-            tb = "".join(traceback.format_exception(
-                exc_info[0], exc_info[1], exc_info[2]))
-            raise ValidationException("%r" % (ex), node=self.node, filename=self.filename)
+            raise ValidationException("%r" % (ex), node=self.node, file_name=self.filename)

     def get_task_spec_name(self, target_ref=None):
         """
@@ -225,18 +218,13 @@ class TaskParser(NodeParser):
             description=self.node.get('name', None),
             position=self.position)

-    def connect_outgoing(self, outgoing_task, outgoing_task_node,
-                         sequence_flow_node, is_default):
+    def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default):
         """
         Connects this task to the indicating outgoing task, with the details in
         the sequence flow. A subclass can override this method to get extra
         information from the node.
         """
-        self.task.connect_outgoing(
-            outgoing_task, sequence_flow_node.get('id'),
-            sequence_flow_node.get(
-                'name', None),
-            self.parse_documentation(sequence_flow_node))
+        self.task.connect(outgoing_task)

     def handles_multiple_outgoing(self):
         """
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright (C) 2012 Matthew Hampton
+# Copyright (C) 2012 Matthew Hampton, 2023 Dan Funk
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -17,10 +17,10 @@
 # 02110-1301 USA

 from .util import BPMN_MODEL_NS
+from ...exceptions import SpiffWorkflowException


-class ValidationException(Exception):
+class ValidationException(SpiffWorkflowException):
     """
     A ValidationException should be thrown with enough information for the user
     to diagnose the problem and sort it out.
@@ -28,23 +28,20 @@ class ValidationException(Exception):
     If available, please provide the offending XML node and filename.
     """

-    def __init__(self, msg, node=None, filename=None, *args, **kwargs):
+    def __init__(self, msg, node=None, file_name=None, *args, **kwargs):
         if node is not None:
             self.tag = self._shorten_tag(node.tag)
-            self.id = node.get('id', '<Unknown>')
-            self.name = node.get('name', '<Unknown>')
-            self.sourceline = getattr(node, 'sourceline', '<Unknown>')
+            self.id = node.get('id', '')
+            self.name = node.get('name', '')
+            self.line_number = getattr(node, 'line_number', '')
         else:
-            self.tag = '<Unknown>'
-            self.id = '<Unknown>'
-            self.name = '<Unknown>'
-            self.sourceline = '<Unknown>'
-        self.filename = filename or '<Unknown File>'
-        message = ('%s\nSource Details: '
-                   '%s (id:%s), name \'%s\', line %s in %s') % (
-            msg, self.tag, self.id, self.name, self.sourceline, self.filename)
+            self.tag = kwargs.get('tag', '')
+            self.id = kwargs.get('id', '')
+            self.name = kwargs.get('name', '')
+            self.line_number = kwargs.get('line_number', '')
+        self.file_name = file_name or ''

-        super(ValidationException, self).__init__(message, *args, **kwargs)
+        super(ValidationException, self).__init__(msg, *args)

     @classmethod
     def _shorten_tag(cls, tag):
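Because ValidationException now derives from SpiffWorkflowException, parser errors and engine errors can be handled by one except clause. A hedged usage sketch (the import paths are assumed to match this tree's layout):

    # Sketch: one handler for both parser and engine failures.
    from SpiffWorkflow.exceptions import SpiffWorkflowException
    from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException

    try:
        raise ValidationException("No start event found", file_name="process.bpmn")
    except SpiffWorkflowException as exc:
        print(exc, getattr(exc, 'file_name', ''))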
@@ -93,9 +93,9 @@ class EventDefinitionParser(TaskParser):
             time_cycle = first(self.xpath('.//bpmn:timeCycle'))
             if time_cycle is not None:
                 return CycleTimerEventDefinition(label, time_cycle.text)
-            raise ValidationException("Unknown Time Specification", node=self.node, filename=self.filename)
+            raise ValidationException("Unknown Time Specification", node=self.node, file_name=self.filename)
         except Exception as e:
-            raise ValidationException("Time Specification Error. " + str(e), node=self.node, filename=self.filename)
+            raise ValidationException("Time Specification Error. " + str(e), node=self.node, file_name=self.filename)

     def get_message_correlations(self, message_ref):

@@ -186,7 +186,7 @@ class EndEventParser(EventDefinitionParser):
         event_definition = self.get_event_definition([MESSAGE_EVENT_XPATH, CANCEL_EVENT_XPATH, ERROR_EVENT_XPATH,
                                                       ESCALATION_EVENT_XPATH, TERMINATION_EVENT_XPATH])
         task = self._create_task(event_definition)
-        task.connect_outgoing(self.spec.end, '%s.ToEndJoin' % self.node.get('id'), None, None)
+        task.connect(self.spec.end)
         return task

@@ -251,12 +251,6 @@ class EventBasedGatewayParser(EventDefinitionParser):
     def handles_multiple_outgoing(self):
         return True

-    def connect_outgoing(self, outgoing_task, outgoing_task_node, sequence_flow_node, is_default):
+    def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default):
         self.task.event_definition.event_definitions.append(outgoing_task.event_definition)
-        self.task.connect_outgoing(
-            outgoing_task,
-            sequence_flow_node.get('id'),
-            sequence_flow_node.get('name', None),
-            self.parse_documentation(sequence_flow_node)
-        )
+        self.task.connect(outgoing_task)
@@ -46,7 +46,7 @@ class NodeParser:
             if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
                 specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
             else:
-                raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.filename)
+                raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.file_name)
         return specs

     def parse_outgoing_data_references(self):
@@ -56,7 +56,7 @@ class NodeParser:
             if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
                 specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
             else:
-                raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.filename)
+                raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.file_name)
         return specs

     def parse_extensions(self, node=None):
@@ -57,25 +57,17 @@ class ExclusiveGatewayParser(TaskParser):
     appropriately.
     """

-    def connect_outgoing(self, outgoing_task, outgoing_task_node,
-                         sequence_flow_node, is_default):
+    def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default):
         if is_default:
-            super(ExclusiveGatewayParser, self).connect_outgoing(
-                outgoing_task, outgoing_task_node, sequence_flow_node,
-                is_default)
+            super(ExclusiveGatewayParser, self).connect_outgoing(outgoing_task, sequence_flow_node, is_default)
         else:
             cond = self.parse_condition(sequence_flow_node)
             if cond is None:
                 raise ValidationException(
-                    'Non-default exclusive outgoing sequence flow '
-                    ' without condition',
+                    'Non-default exclusive outgoing sequence flow without condition',
                     sequence_flow_node,
                     self.filename)
-            self.task.connect_outgoing_if(
-                cond, outgoing_task,
-                sequence_flow_node.get('id'),
-                sequence_flow_node.get('name', None),
-                self.parse_documentation(sequence_flow_node))
+            self.task.connect_outgoing_if(cond, outgoing_task)

     def handles_multiple_outgoing(self):
         return True
@@ -121,12 +113,12 @@ class SubprocessParser:
             raise ValidationException(
                 'Multiple Start points are not allowed in SubWorkflow Task',
                 node=task_parser.node,
-                filename=task_parser.filename)
+                file_name=task_parser.filename)
         if len(workflow_end_event) == 0:
             raise ValidationException(
                 'A SubWorkflow Must contain an End event',
                 node=task_parser.node,
-                filename=task_parser.filename)
+                file_name=task_parser.filename)

         nsmap = DEFAULT_NSMAP.copy()
         nsmap['camunda'] = "http://camunda.org/schema/1.0/bpmn"
@@ -151,14 +143,14 @@ class SubprocessParser:
             raise ValidationException(
                 'No "calledElement" attribute for Call Activity.',
                 node=task_parser.node,
-                filename=task_parser.filename)
+                file_name=task_parser.filename)
         parser = task_parser.process_parser.parser.get_process_parser(called_element)
         if parser is None:
             raise ValidationException(
                 f"The process '{called_element}' was not found. Did you mean one of the following: "
                 f"{', '.join(task_parser.process_parser.parser.get_process_ids())}?",
                 node=task_parser.node,
-                filename=task_parser.filename)
+                file_name=task_parser.filename)
         return called_element

@@ -206,7 +198,7 @@ class ScriptTaskParser(TaskParser):
         except AssertionError as ae:
             raise ValidationException(
                 f"Invalid Script Task. No Script Provided. " + str(ae),
-                node=self.node, filename=self.filename)
+                node=self.node, file_name=self.filename)


 class ServiceTaskParser(TaskParser):
@@ -1,178 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2020 Matthew Hampton, Dan Funk
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
-# 02110-1301 USA
-
-import configparser
-from io import BytesIO, TextIOWrapper
-from warnings import warn
-
-from lxml import etree
-import zipfile
-import os
-
-from SpiffWorkflow.task import TaskState
-from ...bpmn.specs.SubWorkflowTask import SubWorkflowTask
-from ...bpmn.workflow import BpmnWorkflow
-from ...bpmn.serializer.json import BPMNJSONSerializer
-from ..parser.BpmnParser import BpmnParser
-from .Packager import Packager
-
-
-class BpmnSerializer(BPMNJSONSerializer):
-    """
-    DEPRECATED --- This call remains available only so that folks can deserialize
-    existing workflows.
-
-    The BpmnSerializer class provides support for deserializing a Bpmn Workflow
-    Spec from a BPMN package. The BPMN package must have been created using the
-    :class:`SpiffWorkflow.bpmn.serializer.Packager`.
-
-    It will also use the appropriate subclass of BpmnParser, if one is included
-    in the metadata.ini file.
-    """
-
-    def __init_subclass__(cls, **kwargs):
-        """This throws a deprecation warning on subclassing."""
-        warn(f'{cls.__name__} is deprecated. Please use '
-             f'bpmn.serializer.workflow.BpmnWorkflowSerializer',
-             DeprecationWarning, stacklevel=2)
-        super().__init_subclass__(**kwargs)
-
-    def serialize_workflow(self, workflow, **kwargs):
-        """
-        *** DEPRECATED *** DEPRECATED *** DEPRECATED *** DEPRECATED ***
-        Serializes the workflow data and task tree. Will also serialize
-        the Spec if 'include_spec' kwarg is not set to false.
-        Please use bpmn.serializer.workflow.BpmnWorkflowSerializer for
-        Serialization. This class remains available only to help transition
-        to the new Serialization scheme.
-        """
-        """This throws a deprecation warning on initialization."""
-        warn(f'{self.__class__.__name__} is deprecated. DO NOT continue to '
-             f'use it for serialization. Deserialize your old workflows, then'
-             f'move to the new serializer for storing. See '
-             f'bpmn.serializer.workflow.BpmnWorkflowSerializer',
-             DeprecationWarning, stacklevel=2)
-        assert isinstance(workflow, BpmnWorkflow)
-        include_spec = kwargs.get('include_spec',True)
-        return super().serialize_workflow(workflow, include_spec=include_spec)
-
-    def serialize_task(self, task, skip_children=False, **kwargs):
-        return super().serialize_task(task,
-                                      skip_children=skip_children,
-                                      allow_subs=True)
-
-    def deserialize_workflow(self, s_state, workflow_spec=None,
-                             read_only=False, **kwargs):
-
-        return super().deserialize_workflow(s_state,
-                                            wf_class=BpmnWorkflow,
-                                            wf_spec=workflow_spec,
-                                            read_only=read_only,
-                                            **kwargs)
-
-    def _deserialize_task_children(self, task, s_state):
-        """Reverses the internal process that will merge children from a
-        sub-workflow in the top level workflow. This copies the states
-        back into the sub-workflow after generating it from the base spec"""
-        if not isinstance(task.task_spec, SubWorkflowTask):
-            return super()._deserialize_task_children(task, s_state)
-
-        sub_workflow = task.task_spec.create_sub_workflow(task)
-        children = []
-        for c in s_state['children']:
-            # One child belongs to the parent workflow (The path back
-            # out of the subworkflow) the other children belong to the
-            # sub-workflow.
-
-            # We need to determine if we are still in the same workflow,
-            # Ideally we can just check: if c['workflow_name'] == sub_workflow.name
-            # however, we need to support deserialization of workflows without this
-            # critical property, at least temporarily, so people can migrate.
-            if 'workflow_name' in c:
-                same_workflow = c['workflow_name'] == sub_workflow.name
-            else:
-                same_workflow = sub_workflow.get_tasks_from_spec_name(c['task_spec'])
-
-            if same_workflow:
-                start_task = self.deserialize_task(sub_workflow, c)
-                children.append(start_task)
-                start_task.parent = task.id
-                sub_workflow.task_tree = start_task
-                # get a list of tasks in reverse order of change
-                # our last task should be on the top.
-                tasks = sub_workflow.get_tasks(TaskState.COMPLETED)
-                tasks.sort(key=lambda x: x.last_state_change,reverse=True)
-                if len(tasks)>0:
-                    last_task = tasks[0]
-                    sub_workflow.last_task = last_task
-            else:
-                resume_task = self.deserialize_task(task.workflow, c)
-                resume_task.parent = task.id
-                children.append(resume_task)
-        return children
-
-    def deserialize_task(self, workflow, s_state):
-        assert isinstance(workflow, BpmnWorkflow)
-        return super().deserialize_task(workflow, s_state)
-
-    def deserialize_workflow_spec(self, s_state, filename=None):
-        """
-        :param s_state: a byte-string with the contents of the packaged
-        workflow archive, or a file-like object.
-
-        :param filename: the name of the package file.
-        """
-        if isinstance(s_state,dict):
-            return super().deserialize_workflow_spec(s_state)
-        if isinstance(s_state,str):
-            return super().deserialize_workflow_spec(s_state)
-        if isinstance(s_state, bytes):
-            s_state = BytesIO(s_state)
-
-        package_zip = zipfile.ZipFile(
-            s_state, "r", compression=zipfile.ZIP_DEFLATED)
-        config = configparser.ConfigParser()
-        ini_fp = TextIOWrapper(
-            package_zip.open(Packager.METADATA_FILE), encoding="UTF-8")
-        try:
-            config.read_file(ini_fp)
-        finally:
-            ini_fp.close()
-
-        parser_class = BpmnParser
-        parser_class_module = config.get(
-            'MetaData', 'parser_class_module', fallback=None)
-
-        if parser_class_module:
-            mod = __import__(parser_class_module, fromlist=[
-                config.get('MetaData', 'parser_class')])
-            parser_class = getattr(mod, config.get('MetaData', 'parser_class'))
-
-        parser = parser_class()
-
-        for info in package_zip.infolist():
-            parts = os.path.split(info.filename)
-            if (len(parts) == 2 and not parts[0] and parts[1].lower().endswith('.bpmn')):
-                # It is in the root of the ZIP and is a BPMN file
-                bpmn_fp = package_zip.open(info)
-                try:
-                    bpmn = etree.parse(bpmn_fp)
-                finally:
-                    bpmn_fp.close()
-
-                parser.add_bpmn_xml(bpmn, filename='%s:%s' % (filename, info.filename))
-        spec_name = config.get('MetaData', 'entry_point_process')
-        return parser.get_spec(spec_name)
@@ -1,483 +0,0 @@
-# -*- coding: utf-8 -*-
-from builtins import str
-from builtins import hex
-from builtins import range
-from builtins import object
-# Copyright (C) 2012 Matthew Hampton
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
-# 02110-1301 USA
-
-from collections import deque
-import json
-from ...task import TaskState
-from ...specs.Subworkflow import SubWorkflow
-from ...serializer.base import Serializer
-from ..workflow import BpmnWorkflow
-
-
-class UnrecoverableWorkflowChange(Exception):
-    """
-    This is thrown if the workflow cannot be restored because the workflow spec
-    has changed, and the identified transitions no longer exist.
-    """
-    pass
-
-
-class _RouteNode(object):
-    """
-    Private helper class
-    """
-
-    def __init__(self, task_spec, outgoing_route_node=None):
-        self.task_spec = task_spec
-        self.outgoing = [outgoing_route_node] if outgoing_route_node else []
-        self.state = None
-
-    def get_outgoing_by_spec(self, task_spec):
-        m = [r for r in self.outgoing if r.task_spec == task_spec]
-        return m[0] if m else None
-
-    def to_list(self):
-        result = []
-        n = self
-        while n.outgoing:
-            assert len(
-                n.outgoing) == 1, "to_list(..) cannot be called after a merge"
-            result.append(n.task_spec)
-            n = n.outgoing[0]
-        result.append(n.task_spec)
-        return result
-
-    def contains(self, other_route):
-        if isinstance(other_route, list):
-            return self.to_list()[0:len(other_route)] == other_route
-
-        # This only works before merging
-        assert len(other_route.outgoing) <= 1,\
-            "contains(..) cannot be called after a merge"
-        assert len(self.outgoing) <= 1,\
-            "contains(..) cannot be called after a merge"
-
-        if other_route.task_spec == self.task_spec:
-            if other_route.outgoing and self.outgoing:
-                return self.outgoing[0].contains(other_route.outgoing[0])
-            elif self.outgoing:
-                return True
-            elif not other_route.outgoing:
-                return True
-        return False
-
-
-class _BpmnProcessSpecState(object):
-
-    """
-    Private helper class
-    """
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.route = None
-
-    def get_path_to_transition(self, transition, state, workflow_parents,
-                               taken_routes=None):
-        # find a route passing through each task:
-        route = [self.spec.start]
-        route_to_parent_complete = None
-        for task_name in workflow_parents:
-            route = self._breadth_first_task_search(str(task_name), route)
-            if route is None:
-                raise UnrecoverableWorkflowChange(
-                    'No path found for route \'%s\'' % transition)
-            route_to_parent_complete = route + [route[-1].outputs[0]]
-            route = route + [route[-1].spec.start]
-        route = self._breadth_first_transition_search(
-            transition, route, taken_routes=taken_routes)
-        if route is None:
-            raise UnrecoverableWorkflowChange(
-                'No path found for route \'%s\'' % transition)
-        outgoing_route_node = None
-        for spec in reversed(route):
-            outgoing_route_node = _RouteNode(spec, outgoing_route_node)
-            outgoing_route_node.state = state
-        return outgoing_route_node, route_to_parent_complete
-
-    def add_route(self, outgoing_route_node):
-        if self.route:
-            self._merge_routes(self.route, outgoing_route_node)
-        else:
-            self.route = outgoing_route_node
-
-    def dump(self):
-        print(self.get_dump())
-
-    def get_dump(self):
-        def recursive_dump(route_node, indent, verbose=False):
-
-            task_spec = route_node.task_spec
-            dump = '%s (%s:%s)' % (
-                task_spec.name,
-                task_spec.__class__.__name__,
-                hex(id(task_spec))) + '\n'
-            if verbose:
-                if task_spec.inputs:
-                    dump += indent + '- IN: ' + \
-                        ','.join(['%s (%s)' % (t.name, hex(id(t)))
-                                  for t in task_spec.inputs]) + '\n'
-                if task_spec.outputs:
-                    dump += indent + '- OUT: ' + \
-                        ','.join(['%s (%s)' % (t.name, hex(id(t)))
-                                  for t in task_spec.outputs]) + '\n'
-
-            for i, t in enumerate(route_node.outgoing):
-                dump += indent + ' --> ' + \
-                    recursive_dump(
-                        t, indent + (
-                            ' | ' if i + 1 < len(route_node.outgoing)
-                            else ' '))
-            return dump
-
-        dump = recursive_dump(self.route, '')
-        return dump
-
-    def go(self, workflow):
-        leaf_tasks = []
-        self._go(workflow.task_tree.children[0], self.route, leaf_tasks)
-        for task in sorted(
-                leaf_tasks,
-                key=lambda t: 0 if getattr(
-                    t, '_bpmn_load_target_state', TaskState.READY) == TaskState.READY
-                else 1):
-            task.task_spec._update(task)
-            task._inherit_data()
-            if hasattr(task, '_bpmn_load_target_state'):
-                delattr(task, '_bpmn_load_target_state')
-
-    def _go(self, task, route_node, leaf_tasks):
-        assert task.task_spec == route_node.task_spec
-        if not route_node.outgoing:
-            assert route_node.state is not None
-            setattr(task, '_bpmn_load_target_state', route_node.state)
-            leaf_tasks.append(task)
-        else:
-            if not task._is_finished():
-                if (issubclass(task.task_spec.__class__, SubWorkflow) and
-                        task.task_spec.spec.start in
-                        [o.task_spec for o in route_node.outgoing]):
-                    self._go_in_to_subworkflow(
-                        task, [n.task_spec for n in route_node.outgoing])
-                else:
-                    self._complete_task_silent(
-                        task, [n.task_spec for n in route_node.outgoing])
-            for n in route_node.outgoing:
-                matching_child = [
-                    t for t in task.children if t.task_spec == n.task_spec]
-                assert len(matching_child) == 1
-                self._go(matching_child[0], n, leaf_tasks)
-
-    def _complete_task_silent(self, task, target_children_specs):
-        # This method simulates the completing of a task, but without hooks
-        # being called, and targeting a specific subset of the children
-        if task._is_finished():
-            return
-        task._set_state(TaskState.COMPLETED)
-
-        task.children = []
-        for task_spec in target_children_specs:
-            task._add_child(task_spec)
-
-    def _go_in_to_subworkflow(self, my_task, target_children_specs):
-        # This method simulates the entering of a subworkflow, but without
-        # hooks being called, and targeting a specific subset of the entry
-        # tasks in the subworkflow. It creates the new workflow instance and
-        # merges it in to the tree This is based on
-        # SubWorkflow._on_ready_before_hook(..)
-        if my_task._is_finished():
-            return
-
-        subworkflow = my_task.task_spec._create_subworkflow(my_task)
-        subworkflow.completed_event.connect(
-            my_task.task_spec._on_subworkflow_completed, my_task)
-
-        # Create the children (these are the tasks that follow the subworkflow,
-        # on completion:
-        my_task.children = []
-        my_task._sync_children(my_task.task_spec.outputs, TaskState.FUTURE)
-        for t in my_task.children:
-            t.task_spec._predict(t)
-
-        # Integrate the tree of the subworkflow into the tree of this workflow.
-        for child in subworkflow.task_tree.children:
-            if child.task_spec in target_children_specs:
-                my_task.children.insert(0, child)
-                child.parent = my_task
-
-        my_task._set_internal_data(subworkflow=subworkflow)
-
-        my_task._set_state(TaskState.COMPLETED)
-
-    def _merge_routes(self, target, src):
-        assert target.task_spec == src.task_spec
-        for out_route in src.outgoing:
-            target_out_route = target.get_outgoing_by_spec(out_route.task_spec)
-            if target_out_route:
-                self._merge_routes(target_out_route, out_route)
-            else:
-                target.outgoing.append(out_route)
-
-    def _breadth_first_transition_search(self, transition_id, starting_route,
-                                         taken_routes=None):
-        return self._breadth_first_search(starting_route,
-                                          transition_id=transition_id,
-                                          taken_routes=taken_routes)
-
-    def _breadth_first_task_search(self, task_name, starting_route):
-        return self._breadth_first_search(starting_route, task_name=task_name)
-
-    def _breadth_first_search(self, starting_route, task_name=None,
-                              transition_id=None, taken_routes=None):
-        q = deque()
-        done = set()
-        q.append(starting_route)
-        while q:
-            route = q.popleft()
-            if not route[-1] == starting_route[-1]:
-                if task_name and route[-1].name == task_name:
-                    return route
-                if (transition_id and
-                        hasattr(route[-1], 'has_outgoing_sequence_flow') and
-                        route[-1].has_outgoing_sequence_flow(transition_id)):
-                    spec = route[-1].get_outgoing_sequence_flow_by_id(
-                        transition_id).target_task_spec
-                    if taken_routes:
-                        final_route = route + [spec]
-                        for taken in taken_routes:
-                            t = taken.to_list() if not isinstance(
-                                taken, list) else taken
-                            if final_route[0:len(t)] == t:
-                                spec = None
-                                break
-                    if spec:
-                        route.append(spec)
-                        return route
-            for child in route[-1].outputs:
-                new_route = route + [child]
-                if len(new_route) > 10000:
-                    raise ValueError("Maximum looping limit exceeded "
-                                     "searching for path to % s" %
-                                     (task_name or transition_id))
-                new_route_r = tuple(new_route)
-                if new_route_r not in done:
-                    done.add(new_route_r)
-                    q.append(new_route)
-        return None
-
-
-class CompactWorkflowSerializer(Serializer):
-    """
-    This class provides an implementation of serialize_workflow and
-    deserialize_workflow that produces a compact representation of the workflow
-    state, that can be stored in a database column or reasonably small size.
-
-    It records ONLY enough information to identify the transition leading in to
-    each WAITING or READY state, along with the state of that task. This is
-    generally enough to resurrect a running BPMN workflow instance, with some
-    limitations.
-
-    Limitations:
-    1. The compact representation does not include any workflow or task data.
-       It is the responsibility of the calling application to record whatever
-       data is relevant to it, and set it on the restored workflow.
-    2. The restoring process will not produce exactly the same workflow tree -
-       it finds the SHORTEST route to the saved READY and WAITING tasks, not
-       the route that was actually taken. This means that the tree cannot be
-       interrogated for historical information about the workflow. However, the
-       workflow does follow the same logic paths as would have been followed by
-       the original workflow.
-    """
-
-    STATE_SPEC_VERSION = 1
-
-    def serialize_workflow_spec(self, wf_spec, **kwargs):
-        raise NotImplementedError(
-            "The CompactWorkflowSerializer only supports "
-            " workflow serialization.")
-
-    def deserialize_workflow_spec(self, s_state, **kwargs):
-        raise NotImplementedError(
-            "The CompactWorkflowSerializer only supports "
-            "workflow serialization.")
-
-    def serialize_workflow(self, workflow, include_spec=False, **kwargs):
-        """
-        :param workflow: the workflow instance to serialize
-
-        :param include_spec: Always set to False (The CompactWorkflowSerializer
-        only supports workflow serialization)
-        """
-        if include_spec:
-            raise NotImplementedError(
-                'Including the spec serialization with the workflow state '
-                'is not implemented.')
-        return self._get_workflow_state(workflow)
-
-    def deserialize_workflow(self, s_state, workflow_spec=None,
-                             read_only=False, **kwargs):
-        """
-        :param s_state: the state of the workflow as returned by
-        serialize_workflow
-
-        :param workflow_spec: the Workflow Spec of the workflow
-        (CompactWorkflowSerializer only supports workflow serialization)
-
-        :param read_only: (Optional) True if the workflow should be restored in
-        READ ONLY mode
-
-        NB: Additional kwargs passed to the deserialize_workflow method will be
-        passed to the new_workflow method.
-        """
-        if workflow_spec is None:
-            raise NotImplementedError(
-                'Including the spec serialization with the workflow state is '
-                ' not implemented. A \'workflow_spec\' must '
-                'be provided.')
-        workflow = self.new_workflow(
-            workflow_spec, read_only=read_only, **kwargs)
-        self._restore_workflow_state(workflow, s_state)
-        return workflow
-
-    def new_workflow(self, workflow_spec, read_only=False, **kwargs):
-        """
-        Create a new workflow instance from the given spec and arguments.
-
-        :param workflow_spec: the workflow spec to use
-
-        :param read_only: this should be in read only mode
-
-        :param kwargs: Any extra kwargs passed to the deserialize_workflow
-        method will be passed through here
-        """
-        return BpmnWorkflow(workflow_spec, read_only=read_only, **kwargs)
-
-    def _get_workflow_state(self, workflow):
-        active_tasks = workflow.get_tasks(state=(TaskState.READY | TaskState.WAITING))
-        states = []
-
-        for task in active_tasks:
-            parent_task_spec = task.parent.task_spec
-            transition = parent_task_spec.get_outgoing_sequence_flow_by_spec(
-                task.task_spec).id
-            w = task.workflow
-            workflow_parents = []
-            while w.outer_workflow and w.outer_workflow != w:
-                workflow_parents.append(w.name)
-                w = w.outer_workflow
-            state = ("W" if task.state == TaskState.WAITING else "R")
-            states.append(
-                [transition, list(reversed(workflow_parents)), state])
-
-        compacted_states = []
-        for state in sorted(states,
-                            key=lambda s: ",".join([s[0],
-                                                    s[2],
-                                                    (':'.join(s[1]))])):
-            if state[-1] == 'R':
-                state.pop()
-            if state[-1] == []:
-                state.pop()
-            if len(state) == 1:
-                state = state[0]
-            compacted_states.append(state)
-
-        state_list = compacted_states + [self.STATE_SPEC_VERSION]
-        state_s = json.dumps(state_list)[1:-1]
-        return state_s
-
-    def _restore_workflow_state(self, workflow, state):
-        state_list = json.loads('[' + state + ']')
-
-        self._check_spec_version(state_list[-1])
-
-        s = _BpmnProcessSpecState(workflow.spec)
-
-        routes = []
-        for state in state_list[:-1]:
-            if isinstance(state, str) or type(state).__name__ == 'str':
-                state = [str(state)]
-            transition = state[0]
-            workflow_parents = state[1] if len(state) > 1 else []
-            state = (TaskState.WAITING if len(state) >
-                     2 and state[2] == 'W' else TaskState.READY)
-
-            route, route_to_parent_complete = s.get_path_to_transition(
-                transition, state, workflow_parents)
-            routes.append(
-                (route, route_to_parent_complete, transition, state,
-                 workflow_parents))
-
-        retry = True
-        retry_count = 0
-        while (retry):
-            if retry_count > 100:
-                raise ValueError(
-                    'Maximum retry limit exceeded searching for unique paths')
-            retry = False
-
-            for i in range(len(routes)):
-                (route, route_to_parent_complete, transition, state,
-                 workflow_parents) = routes[i]
-
-                for j in range(len(routes)):
-                    if i == j:
-                        continue
-                    other_route = routes[j][0]
-                    route_to_parent_complete = routes[j][1]
-                    if route.contains(other_route) or (
-                            route_to_parent_complete and route.contains(
-                                route_to_parent_complete)):
-                        taken_routes = [r for r in routes if r[0] != route]
-                        taken_routes = [r for r in [r[0] for r
-                                                    in taken_routes] +
-                                        [r[1] for r in taken_routes] if r]
-                        (route,
-                         route_to_parent_complete) = s.get_path_to_transition(
-                            transition, state, workflow_parents,
-                            taken_routes=taken_routes)
-                        for r in taken_routes:
-                            assert not route.contains(r)
-                        routes[
-                            i] = (route, route_to_parent_complete, transition,
-                                  state, workflow_parents)
-                        retry = True
-                        retry_count += 1
-                        break
-                if retry:
-                    break
-
-        for r in routes:
-            s.add_route(r[0])
-
-        workflow._busy_with_restore = True
-        try:
-            if len(state_list) <= 1:
-                workflow.cancel(success=True)
-                return
-            s.go(workflow)
-        finally:
-            workflow._busy_with_restore = False
-
-    def _check_spec_version(self, v):
-        # We only have one version right now:
-        assert v == self.STATE_SPEC_VERSION
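One detail worth calling out from the deleted serializer: _get_workflow_state stores the state as a JSON list with its surrounding brackets stripped, and _restore_workflow_state puts them back. A tiny round-trip of that trick:

    # Sketch: the bracket-stripped JSON state string and its restoration.
    import json

    states = [["flow_1", ["sub_process"], "W"], "flow_2", 1]  # 1 = STATE_SPEC_VERSION
    state_s = json.dumps(states)[1:-1]
    print(state_s)  # -> ["flow_1", ["sub_process"], "W"], "flow_2", 1
    restored = json.loads('[' + state_s + ']')
    assert restored == states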
@ -1,548 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from builtins import object
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import os
|
||||
import configparser
|
||||
import glob
|
||||
import hashlib
|
||||
import inspect
|
||||
import zipfile
|
||||
from io import StringIO
|
||||
from optparse import OptionParser, OptionGroup
|
||||
from ..parser.BpmnParser import BpmnParser
|
||||
from ..parser.ValidationException import ValidationException
|
||||
from ..parser.util import xpath_eval, one
|
||||
from lxml import etree
|
||||
SIGNAVIO_NS = 'http://www.signavio.com'
|
||||
CONFIG_SECTION_NAME = "Packager Options"
|
||||
|
||||
|
||||
def md5hash(data):
|
||||
if not isinstance(data, bytes):
|
||||
data = data.encode('UTF-8')
|
||||
|
||||
return hashlib.md5(data).hexdigest().lower()
|
||||
|
||||
|
||||
class Packager(object):
|
||||
"""
|
||||
The Packager class pre-parses a set of BPMN files (together with their SVG
|
||||
representation), validates the contents and then produces a ZIP-based
|
||||
archive containing the pre-parsed BPMN and SVG files, the source files (for
|
||||
reference) and a metadata.ini file that contains enough information to
|
||||
create a BpmnProcessSpec instance from the archive (e.g. the ID of the
|
||||
entry point process).
|
||||
|
||||
This class can be extended and any public method overridden to do
|
||||
additional validation / parsing or to package additional metadata.
|
||||
|
||||
Extension point:
|
||||
|
||||
PARSER_CLASS: provide the class that should be used to parse the BPMN
|
||||
files. The fully-qualified name will be included in the metadata.ini file,
|
||||
so that the BpmnSerializer can instantiate the right parser to deal with
|
||||
the package.
|
||||
|
||||
Editor hooks: package_for_editor_<editor name>(self, spec, filename):
|
||||
Called once for each BPMN file. Should add any additional files to the
|
||||
archive.
|
||||
"""
|
||||
|
||||
METADATA_FILE = "metadata.ini"
|
||||
MANIFEST_FILE = "manifest.ini"
|
||||
PARSER_CLASS = BpmnParser
|
||||
|
||||
def __init__(self, package_file, entry_point_process, meta_data=None,
|
||||
editor=None):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param package_file: a file-like object where the contents of the
|
||||
package must be written to
|
||||
|
||||
:param entry_point_process: the name or ID of the entry point process
|
||||
|
||||
:param meta_data: A list of meta-data tuples to include in the
|
||||
metadata.ini file (in addition to the standard ones)
|
||||
|
||||
:param editor: The name of the editor used to create the source BPMN /
|
||||
SVG files. This activates additional hook method calls. (optional)
|
||||
"""
|
||||
self.package_file = package_file
|
||||
self.entry_point_process = entry_point_process
|
||||
self.parser = self.PARSER_CLASS()
|
||||
self.meta_data = meta_data or []
|
||||
self.input_files = []
|
||||
self.input_path_prefix = None
|
||||
self.editor = editor
|
||||
self.manifest = {}
|
||||
|
||||
def add_bpmn_file(self, filename):
|
||||
"""
|
||||
Add the given BPMN filename to the packager's set.
|
||||
"""
|
||||
self.add_bpmn_files([filename])
|
||||
|
||||
def add_bpmn_files_by_glob(self, g):
|
||||
"""
|
||||
Add all filenames matching the provided pattern (e.g. *.bpmn) to the
|
||||
packager's set.
|
||||
"""
|
||||
self.add_bpmn_files(glob.glob(g))
|
||||
|
||||
def add_bpmn_files(self, filenames):
|
||||
"""
|
||||
Add all filenames in the given list to the packager's set.
|
||||
"""
|
||||
self.input_files += filenames
|
||||
|
||||
def create_package(self):
|
||||
"""
|
||||
Creates the package, writing the data out to the provided file-like
|
||||
object.
|
||||
"""
|
||||
|
||||
# Check that all files exist (and calculate the longest shared path
|
||||
# prefix):
|
||||
self.input_path_prefix = None
|
||||
for filename in self.input_files:
|
||||
if not os.path.isfile(filename):
|
||||
raise ValueError(
|
||||
'%s does not exist or is not a file' % filename)
|
||||
if self.input_path_prefix:
|
||||
full = os.path.abspath(os.path.dirname(filename))
|
||||
while not (full.startswith(self.input_path_prefix) and
|
||||
self.input_path_prefix):
|
||||
self.input_path_prefix = self.input_path_prefix[:-1]
|
||||
else:
|
||||
self.input_path_prefix = os.path.abspath(
|
||||
os.path.dirname(filename))
|
||||
|
||||
# Parse all of the XML:
|
||||
self.bpmn = {}
|
||||
for filename in self.input_files:
|
||||
bpmn = etree.parse(filename)
|
||||
self.bpmn[os.path.abspath(filename)] = bpmn
|
||||
|
||||
# Now run through pre-parsing and validation:
|
||||
for filename, bpmn in list(self.bpmn.items()):
|
||||
bpmn = self.pre_parse_and_validate(bpmn, filename)
|
||||
self.bpmn[os.path.abspath(filename)] = bpmn
|
||||
|
||||
# Now check that we can parse it fine:
|
||||
for filename, bpmn in list(self.bpmn.items()):
|
||||
self.parser.add_bpmn_xml(bpmn, filename=filename)
|
||||
# at this point, we have a item in self.wf_spec.get_specs_depth_first()
|
||||
# that has a filename of None and a bpmn that needs to be added to the
|
||||
# list below in for spec.
|
||||
self.wf_spec = self.parser.get_spec(self.entry_point_process)
|
||||
|
||||
# Now package everything:
|
||||
self.package_zip = zipfile.ZipFile(
|
||||
self.package_file, "w", compression=zipfile.ZIP_DEFLATED)
|
||||
|
||||
done_files = set()
|
||||
|
||||
for spec in self.wf_spec.get_specs_depth_first():
|
||||
filename = spec.file
|
||||
if filename is None:
|
||||
# This is for when we are doing a subworkflow, and it
|
||||
# creates something in the bpmn spec list, but it really has
|
||||
# no file. In this case, it is safe to skip the add to the
|
||||
# zip file.
|
||||
continue
|
||||
if filename not in done_files:
|
||||
done_files.add(filename)
|
||||
|
||||
bpmn = self.bpmn[os.path.abspath(filename)]
|
||||
self.write_to_package_zip(
|
||||
"%s.bpmn" % spec.name, etree.tostring(bpmn.getroot()))
|
||||
|
||||
self.write_to_package_zip(
|
||||
"src/" + self._get_zip_path(filename), filename)
|
||||
|
||||
self._call_editor_hook('package_for_editor', spec, filename)
|
||||
|
||||
self.write_meta_data()
|
||||
self.write_manifest()
|
||||
|
||||
self.package_zip.close()
|
||||
|
||||
def write_file_to_package_zip(self, filename, src_filename):
|
||||
"""
|
||||
Writes a local file in to the zip file and adds it to the manifest
|
||||
dictionary
|
||||
|
||||
:param filename: The zip file name
|
||||
|
||||
:param src_filename: the local file name
|
||||
"""
|
||||
f = open(src_filename)
|
||||
with f:
|
||||
data = f.read()
|
||||
self.manifest[filename] = md5hash(data)
|
||||
self.package_zip.write(src_filename, filename)
|
||||
|
||||
def write_to_package_zip(self, filename, data):
|
||||
"""
|
||||
Writes data to the zip file and adds it to the manifest dictionary
|
||||
|
||||
:param filename: The zip file name
|
||||
|
||||
:param data: the data
|
||||
"""
|
||||
self.manifest[filename] = md5hash(data)
|
||||
self.package_zip.writestr(filename, data)
|
||||
|
||||
def write_manifest(self):
|
||||
"""
|
||||
Write the manifest content to the zip file. It must be a predictable
|
||||
order.
|
||||
"""
|
||||
config = configparser.ConfigParser()
|
||||
|
||||
config.add_section('Manifest')
|
||||
|
||||
for f in sorted(self.manifest.keys()):
|
||||
config.set('Manifest', f.replace(
|
||||
'\\', '/').lower(), self.manifest[f])
|
||||
|
||||
ini = StringIO()
|
||||
config.write(ini)
|
||||
self.manifest_data = ini.getvalue()
|
||||
self.package_zip.writestr(self.MANIFEST_FILE, self.manifest_data)
|
||||
|
||||
    def pre_parse_and_validate(self, bpmn, filename):
        """
        A subclass can override this method to provide additional parsing or
        validation. It should call the parent method first.

        :param bpmn: an lxml tree of the bpmn content

        :param filename: the source file name

        This must return the updated bpmn object (or a replacement)
        """
        bpmn = self._call_editor_hook(
            'pre_parse_and_validate', bpmn, filename) or bpmn

        return bpmn

    def pre_parse_and_validate_signavio(self, bpmn, filename):
        """
        This is the Signavio specific editor hook for pre-parsing and
        validation.

        A subclass can override this method to provide additional parsing or
        validation. It should call the parent method first.

        :param bpmn: an lxml tree of the bpmn content

        :param filename: the source file name

        This must return the updated bpmn object (or a replacement)
        """
        self._check_for_disconnected_boundary_events_signavio(bpmn, filename)
        self._fix_call_activities_signavio(bpmn, filename)
        return bpmn

    def _check_for_disconnected_boundary_events_signavio(self, bpmn, filename):
        # Signavio sometimes disconnects a BoundaryEvent from its owning task.
        # They then show up as intermediateCatchEvents without any incoming
        # sequence flows.
        xpath = xpath_eval(bpmn)
        for catch_event in xpath('.//bpmn:intermediateCatchEvent'):
            incoming = xpath(
                './/bpmn:sequenceFlow[@targetRef="%s"]' %
                catch_event.get('id'))
            if not incoming:
                raise ValidationException(
                    'Intermediate Catch Event has no incoming sequences. '
                    'This might be a Boundary Event that has been '
                    'disconnected.',
                    node=catch_event, filename=filename)

    def _fix_call_activities_signavio(self, bpmn, filename):
        """
        Signavio produces slightly invalid BPMN for call activity nodes... It
        is supposed to put a reference to the id of the called process into
        the calledElement attribute. Instead it stores a string (which is the
        name of the process - not its ID, in our interpretation) in an
        extension tag.

        This code gets the name of the 'subprocess reference', finds a process
        with a matching name, and sets the calledElement attribute to the id of
        the process.
        """
        for node in xpath_eval(bpmn)(".//bpmn:callActivity"):
            calledElement = node.get('calledElement', None)
            if not calledElement:
                signavioMetaData = xpath_eval(node, extra_ns={
                    'signavio': SIGNAVIO_NS})(
                    './/signavio:signavioMetaData[@metaKey="entry"]')
                if not signavioMetaData:
                    raise ValidationException(
                        'No Signavio "Subprocess reference" specified.',
                        node=node, filename=filename)
                subprocess_reference = one(signavioMetaData).get('metaValue')
                matches = []
                for b in list(self.bpmn.values()):
                    for p in xpath_eval(b)(".//bpmn:process"):
                        if (p.get('name', p.get('id', None)) ==
                                subprocess_reference):
                            matches.append(p)
                if not matches:
                    raise ValidationException(
                        "No matching process definition found for '%s'." %
                        subprocess_reference, node=node, filename=filename)
                if len(matches) != 1:
                    raise ValidationException(
                        "More than one matching process definition "
                        "found for '%s'." % subprocess_reference, node=node,
                        filename=filename)

                node.set('calledElement', matches[0].get('id'))

    def _call_editor_hook(self, hook, *args, **kwargs):
        if self.editor:
            hook_func = getattr(self, "%s_%s" % (hook, self.editor), None)
            if hook_func:
                return hook_func(*args, **kwargs)
        return None

    def package_for_editor_signavio(self, spec, filename):
        """
        Adds the SVG files to the archive for this BPMN file.
        """
        signavio_file = filename[:-len('.bpmn20.xml')] + '.signavio.xml'
        if os.path.exists(signavio_file):
            self.write_file_to_package_zip(
                "src/" + self._get_zip_path(signavio_file), signavio_file)

            f = open(signavio_file, 'r')
            try:
                signavio_tree = etree.parse(f)
            finally:
                f.close()
            svg_node = one(signavio_tree.findall('.//svg-representation'))
            self.write_to_package_zip("%s.svg" % spec.name, svg_node.text)

    def write_meta_data(self):
        """
        Writes the metadata.ini file to the archive.
        """
        config = configparser.ConfigParser()

        config.add_section('MetaData')
        config.set('MetaData', 'entry_point_process', self.wf_spec.name)
        if self.editor:
            config.set('MetaData', 'editor', self.editor)

        for k, v in self.meta_data:
            config.set('MetaData', k, v)

        if not self.PARSER_CLASS == BpmnParser:
            config.set('MetaData', 'parser_class_module',
                       inspect.getmodule(self.PARSER_CLASS).__name__)
            config.set('MetaData', 'parser_class', self.PARSER_CLASS.__name__)

        ini = StringIO()
        config.write(ini)
        self.write_to_package_zip(self.METADATA_FILE, ini.getvalue())

    def _get_zip_path(self, filename):
        p = os.path.abspath(filename)[
            len(self.input_path_prefix):].replace(os.path.sep, '/')
        while p.startswith('/'):
            p = p[1:]
        return p

    @classmethod
    def get_version(cls):
        try:
            import pkg_resources  # part of setuptools
            version = pkg_resources.require("SpiffWorkflow")[0].version
        except Exception:
            version = 'DEV'
        return version

    @classmethod
    def create_option_parser(cls):
        """
        Override in subclass if required.
        """
        return OptionParser(
            usage=("%prog [options] -o <package file> -p "
                   "<entry point process> <input BPMN files ...>"),
            version="SpiffWorkflow BPMN Packager %s" % (cls.get_version()))

    @classmethod
    def add_main_options(cls, parser):
        """
        Override in subclass if required.
        """
        parser.add_option("-o", "--output", dest="package_file",
                          help="create the BPMN package in the specified file")
        parser.add_option("-p", "--process", dest="entry_point_process",
                          help="specify the entry point process")
        parser.add_option("-c", "--config-file", dest="config_file",
                          help="specify a config file to use")
        parser.add_option(
            "-i", "--initialise-config-file", action="store_true",
            dest="init_config_file", default=False,
            help="create a new config file from the specified options")

        group = OptionGroup(parser, "BPMN Editor Options",
                            "These options are not required, but may be "
                            "provided to activate special features of "
                            "supported BPMN editors.")
        group.add_option("--editor", dest="editor",
                         help="editors with special support: signavio")
        parser.add_option_group(group)

    @classmethod
    def add_additional_options(cls, parser):
        """
        Override in subclass if required.
        """
        group = OptionGroup(parser, "Target Engine Options",
                            "These options are not required, but may be "
                            "provided if a specific "
                            "BPMN application engine is targeted.")
        group.add_option("-e", "--target-engine", dest="target_engine",
                         help="target the specified BPMN application engine")
        group.add_option(
            "-t", "--target-version", dest="target_engine_version",
            help="target the specified version of the BPMN application engine")
        parser.add_option_group(group)

    @classmethod
    def check_args(cls, config, options, args, parser, package_file=None):
        """
        Override in subclass if required.
        """
        if not args:
            parser.error("no input files specified")
        if not (package_file or options.package_file):
            parser.error("no package file specified")
        if not options.entry_point_process:
            parser.error("no entry point process specified")

    @classmethod
    def merge_options_and_config(cls, config, options, args):
        """
        Override in subclass if required.
        """
        if args:
            config.set(CONFIG_SECTION_NAME, 'input_files', ','.join(args))
        elif config.has_option(CONFIG_SECTION_NAME, 'input_files'):
            for i in config.get(CONFIG_SECTION_NAME, 'input_files').split(','):
                if not os.path.isabs(i):
                    i = os.path.abspath(
                        os.path.join(os.path.dirname(options.config_file), i))
                args.append(i)

        cls.merge_option_and_config_str('package_file', config, options)
        cls.merge_option_and_config_str('entry_point_process', config, options)
        cls.merge_option_and_config_str('target_engine', config, options)
        cls.merge_option_and_config_str(
            'target_engine_version', config, options)
        cls.merge_option_and_config_str('editor', config, options)

    @classmethod
    def merge_option_and_config_str(cls, option_name, config, options):
        """
        Utility method to merge an option and config, with the option taking
        precedence
        """

        opt = getattr(options, option_name, None)
        if opt:
            config.set(CONFIG_SECTION_NAME, option_name, opt)
        elif config.has_option(CONFIG_SECTION_NAME, option_name):
            setattr(options, option_name, config.get(
                CONFIG_SECTION_NAME, option_name))

    @classmethod
    def create_meta_data(cls, options, args, parser):
        """
        Override in subclass if required.
        """
        meta_data = []
        meta_data.append(('spiff_version', cls.get_version()))
        if options.target_engine:
            meta_data.append(('target_engine', options.target_engine))
        if options.target_engine_version:
            meta_data.append(
                ('target_engine_version', options.target_engine_version))
        return meta_data

    @classmethod
    def main(cls, argv=None, package_file=None):
        parser = cls.create_option_parser()

        cls.add_main_options(parser)

        cls.add_additional_options(parser)

        (options, args) = parser.parse_args(args=argv)

        config = configparser.ConfigParser()
        if options.config_file:
            config.read(options.config_file)
        if not config.has_section(CONFIG_SECTION_NAME):
            config.add_section(CONFIG_SECTION_NAME)

        cls.merge_options_and_config(config, options, args)
        if options.init_config_file:
            if not options.config_file:
                parser.error(
                    "no config file specified - cannot initialise config file")
            f = open(options.config_file, "w")
            with f:
                config.write(f)
            return

        cls.check_args(config, options, args, parser, package_file)

        meta_data = cls.create_meta_data(options, args, parser)

        packager = cls(package_file=package_file or options.package_file,
                       entry_point_process=options.entry_point_process,
                       meta_data=meta_data, editor=options.editor)
        for a in args:
            packager.add_bpmn_files_by_glob(a)
        packager.create_package()

        return packager


def main(packager_class=None):
    """
    :param packager_class: The Packager class to use. Default: Packager.
    """

    if not packager_class:
        packager_class = Packager

    packager_class.main()


if __name__ == '__main__':
    main()
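# A minimal usage sketch for the entry point above (hypothetical file and
# process names); the arguments mirror the -o/-p options registered in
# add_main_options, and glob patterns work via add_bpmn_files_by_glob:
#
#   Packager.main(argv=[
#       '-o', 'order_process.zip',    # package file to create
#       '-p', 'OrderProcess',         # entry point process name
#       'order_process.bpmn20.xml',   # input BPMN file(s)
#   ])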
@ -12,7 +12,7 @@ from ..specs.events.event_definitions import TimerEventDefinition, CycleTimerEve
from ..specs.events.event_definitions import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition
from ..specs.events.event_definitions import CorrelationProperty, NamedEventDefinition

from ..specs.BpmnSpecMixin import BpmnSpecMixin, SequenceFlow
from ..specs.BpmnSpecMixin import BpmnSpecMixin
from ...operators import Attrib, PathAttrib


@ -100,7 +100,6 @@ class BpmnTaskSpecConverter(DictionaryConverter):
                partial(self.event_defintion_from_dict, event_definition)
            )

        self.register(SequenceFlow, self.sequence_flow_to_dict, self.sequence_flow_from_dict)
        self.register(Attrib, self.attrib_to_dict, partial(self.attrib_from_dict, Attrib))
        self.register(PathAttrib, self.attrib_to_dict, partial(self.attrib_from_dict, PathAttrib))
        self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict)
@ -160,12 +159,6 @@ class BpmnTaskSpecConverter(DictionaryConverter):
            'documentation': spec.documentation,
            'loopTask': spec.loopTask,
            'position': spec.position,
            'outgoing_sequence_flows': dict(
                (k, self.convert(v)) for k, v in spec.outgoing_sequence_flows.items()
            ),
            'outgoing_sequence_flows_by_id': dict(
                (k, self.convert(v)) for k, v in spec.outgoing_sequence_flows_by_id.items()
            ),
            'data_input_associations': [ self.convert(obj) for obj in spec.data_input_associations ],
            'data_output_associations': [ self.convert(obj) for obj in spec.data_output_associations ],
        }
@ -224,8 +217,6 @@ class BpmnTaskSpecConverter(DictionaryConverter):
        spec.documentation = dct.pop('documentation', None)
        spec.lane = dct.pop('lane', None)
        spec.loopTask = dct.pop('loopTask', False)
        spec.outgoing_sequence_flows = self.restore(dct.pop('outgoing_sequence_flows', {}))
        spec.outgoing_sequence_flows_by_id = self.restore(dct.pop('outgoing_sequence_flows_by_id', {}))
        spec.data_input_associations = self.restore(dct.pop('data_input_associations', []))
        spec.data_output_associations = self.restore(dct.pop('data_output_associations', []))

@ -283,17 +274,6 @@ class BpmnTaskSpecConverter(DictionaryConverter):
        event_definition.external = external
        return event_definition

    def sequence_flow_to_dict(self, flow):
        return {
            'id': flow.id,
            'name': flow.name,
            'documentation': flow.documentation,
            'target_task_spec': flow.target_task_spec.name
        }

    def sequence_flow_from_dict(self, dct):
        return SequenceFlow(**dct)

    def attrib_to_dict(self, attrib):
        return { 'name': attrib.name }

@ -1,259 +0,0 @@
# -*- coding: utf-8 -*-


# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

from ...camunda.specs.UserTask import UserTask
from ...dmn.engine.DMNEngine import DMNEngine
from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
from ...dmn.specs.model import DecisionTable
from ...serializer.dict import DictionarySerializer
from ...util.impl import get_class
from ..specs.BpmnSpecMixin import SequenceFlow
from ..specs.ExclusiveGateway import ExclusiveGateway
from ..specs.MultiInstanceTask import MultiInstanceTask
from ..specs.ScriptTask import ScriptTask
from ..specs.SubWorkflowTask import SubWorkflowTask


class BPMNDictionarySerializer(DictionarySerializer):

    def serialize_task_spec(self, spec):
        s_state = super().serialize_task_spec(spec)

        if hasattr(spec,'documentation'):
            s_state['documentation'] = spec.documentation
        if hasattr(spec,'extensions'):
            s_state['extensions'] = self.serialize_dict(spec.extensions)
        if hasattr(spec,'lane'):
            s_state['lane'] = spec.lane

        if hasattr(spec,'outgoing_sequence_flows'):
            s_state['outgoing_sequence_flows'] = {x:spec.outgoing_sequence_flows[x].serialize() for x in
                                                  spec.outgoing_sequence_flows.keys()}
            s_state['outgoing_sequence_flows_by_id'] = {x:spec.outgoing_sequence_flows_by_id[x].serialize() for x in
                                                        spec.outgoing_sequence_flows_by_id.keys()}

        # Note: Events are not serialized; this is documented in
        # the TaskSpec API docs.

        return s_state

    def deserialize_task_spec(self, wf_spec, s_state, spec):
        spec = super().deserialize_task_spec(wf_spec, s_state, spec)
        # I would use the s_state.get('extensions',{}) inside of the deserialize
        # but many tasks have no extensions on them.
        if s_state.get('extensions',None) != None:
            spec.extensions = self.deserialize_dict(s_state['extensions'])
        if 'documentation' in s_state.keys():
            spec.documentation = s_state['documentation']

        if 'lane' in s_state.keys():
            spec.lane = s_state.get('lane',None)
        if s_state.get('outgoing_sequence_flows',None):
            spec.outgoing_sequence_flows = s_state.get('outgoing_sequence_flows', {})
            spec.outgoing_sequence_flows_by_id = s_state.get('outgoing_sequence_flows_by_id', {})

        return spec

    def serialize_exclusive_gateway(self, spec):
        s_state = self.serialize_multi_choice(spec)
        s_state['default_task_spec'] = spec.default_task_spec
        return s_state

    def deserialize_exclusive_gateway(self, wf_spec, s_state):
        spec = ExclusiveGateway(wf_spec, s_state['name'])
        self.deserialize_multi_choice(wf_spec, s_state, spec=spec)
        spec.default_task_spec = s_state['default_task_spec']
        return spec

    def serialize_script_task(self, spec):
        s_state = self.serialize_task_spec(spec)
        s_state['script'] = spec.script
        return s_state

    def deserialize_script_task(self, wf_spec, s_state):
        spec = ScriptTask(wf_spec, s_state['name'], s_state['script'])
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec

    def serialize_subworkflow_task(self, spec):
        s_state = self.serialize_task_spec(spec)
        s_state['wf_class'] = spec.wf_class.__module__ + "." + spec.wf_class.__name__
        s_state['spec'] = self.serialize_workflow_spec(spec.spec)
        return s_state

    def deserialize_subworkflow_task(self, wf_spec, s_state, cls):
        spec = cls(wf_spec, s_state['name'])
        spec.wf_class = get_class(s_state['wf_class'])
        if 'spec_name' in s_state:
            s_state['spec'] = self.SPEC_STATES[s_state['spec_name']]
        spec.spec = self.deserialize_workflow_spec(s_state['spec'])
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec

    def serialize_generic_event(self, spec):
        s_state = self.serialize_task_spec(spec)
        if spec.event_definition:
            s_state['event_definition'] = spec.event_definition.serialize()
        else:
            s_state['event_definition'] = None
        return s_state

    def deserialize_generic_event(self, wf_spec, s_state, cls):
        if s_state.get('event_definition',None):
            evtcls = get_class(s_state['event_definition']['classname'])
            event = evtcls.deserialize(s_state['event_definition'])
        else:
            event = None
        spec = cls(wf_spec, s_state['name'], event)
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec

    def serialize_boundary_event_parent(self, spec):
        s_state = self.serialize_task_spec(spec)
        s_state['main_child_task_spec'] = spec.main_child_task_spec.id
        return s_state

    def deserialize_boundary_event_parent(self, wf_spec, s_state, cls):

        main_child_task_spec = wf_spec.get_task_spec_from_id(s_state['main_child_task_spec'])
        spec = cls(wf_spec, s_state['name'], main_child_task_spec)
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec

    def serialize_boundary_event(self, spec):
        s_state = self.serialize_task_spec(spec)
        if spec.cancel_activity:
            s_state['cancel_activity'] = spec.cancel_activity
        else:
            s_state['cancel_activity'] = None
        if spec.event_definition:
            s_state['event_definition'] = spec.event_definition.serialize()
        else:
            s_state['event_definition'] = None
        return s_state

    def deserialize_boundary_event(self, wf_spec, s_state, cls):
        cancel_activity = s_state.get('cancel_activity',None)
        if s_state['event_definition']:
            eventclass = get_class(s_state['event_definition']['classname'])
            event = eventclass.deserialize(s_state['event_definition'])
        else:
            event = None
        spec = cls(wf_spec, s_state['name'], cancel_activity=cancel_activity,event_definition=event)
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec

    def serialize_user_task(self, spec):
        s_state = self.serialize_task_spec(spec)
        s_state['form'] = spec.form
        return s_state

    def deserialize_user_task(self, wf_spec, s_state):
        spec = UserTask(wf_spec, s_state['name'], s_state['form'])
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec


    def serialize_business_rule_task(self, spec):
        s_state = self.serialize_task_spec(spec)
        dictrep = spec.dmnEngine.decision_table.serialize()
        # future
        s_state['dmn'] = dictrep
        return s_state

    def deserialize_business_rule_task(self, wf_spec, s_state):
        dt = DecisionTable(None, None, None)
        dt.deserialize(s_state['dmn'])
        dmn_engine = DMNEngine(dt)
        spec = BusinessRuleTask(wf_spec, s_state['name'], dmn_engine)
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec

    def serialize_multi_instance(self, spec):
        s_state = super().serialize_multi_instance(spec)
        # here we need to add in all of the things that would get serialized
        # for other classes that the MultiInstance could be -
        #
        if hasattr(spec,'form'):
            s_state['form'] = spec.form

        if isinstance(spec,MultiInstanceTask):
            s_state['collection'] = self.serialize_arg(spec.collection)
            s_state['elementVar'] = self.serialize_arg(spec.elementVar)
            s_state['completioncondition'] = self.serialize_arg(spec.completioncondition)
            s_state['isSequential'] = self.serialize_arg(spec.isSequential)
            s_state['loopTask'] = self.serialize_arg(spec.loopTask)
            if (hasattr(spec,'expanded')):
                s_state['expanded'] = self.serialize_arg(spec.expanded)
        if isinstance(spec,BusinessRuleTask):
            br_state = self.serialize_business_rule_task(spec)
            s_state['dmn'] = br_state['dmn']
        if isinstance(spec, ScriptTask):
            br_state = self.serialize_script_task(spec)
            s_state['script'] = br_state['script']
        if isinstance(spec, SubWorkflowTask):
            br_state = self.serialize_subworkflow(spec)
            s_state['wf_class'] = br_state['wf_class']
            s_state['spec'] = br_state['spec']

        return s_state

    def deserialize_multi_instance(self, wf_spec, s_state, cls=None):
        cls = super().deserialize_multi_instance(wf_spec, s_state, cls)
        if isinstance(cls,MultiInstanceTask):
            cls.isSequential = self.deserialize_arg(s_state['isSequential'])
            cls.loopTask = self.deserialize_arg(s_state['loopTask'])
            cls.elementVar = self.deserialize_arg(s_state['elementVar'])
            cls.completioncondition = self.deserialize_arg(s_state['completioncondition'])
            cls.collection = self.deserialize_arg(s_state['collection'])
            if s_state.get('expanded',None):
                cls.expanded = self.deserialize_arg(s_state['expanded'])
        if isinstance(cls,BusinessRuleTask):
            dt = DecisionTable(None,None,None)
            dt.deserialize(s_state['dmn'])
            dmn_engine = DMNEngine(dt)
            cls.dmnEngine=dmn_engine
        if isinstance(cls, ScriptTask):
            cls.script = s_state['script']
        if isinstance(cls, SubWorkflowTask):
            cls.wf_class = get_class(s_state['wf_class'])
            cls.spec = self.deserialize_workflow_spec(s_state['spec'])

        if s_state.get('form',None):
            cls.form = s_state['form']

        return cls

    def _deserialize_workflow_spec_task_spec(self, spec, task_spec, name):
        if hasattr(task_spec,'outgoing_sequence_flows'):
            for entry,value in task_spec.outgoing_sequence_flows.items():
                task_spec.outgoing_sequence_flows[entry] = \
                    SequenceFlow(value['id'],
                                 value['name'],
                                 value['documentation'],
                                 spec.get_task_spec_from_id(value['target_task_spec']))
            for entry, value in task_spec.outgoing_sequence_flows_by_id.items():
                task_spec.outgoing_sequence_flows_by_id[entry] = \
                    SequenceFlow(value['id'],
                                 value['name'],
                                 value['documentation'],
                                 spec.get_task_spec_from_id(value['target_task_spec']))
        super()._deserialize_workflow_spec_task_spec(spec, task_spec, name)

    def _prevtaskclass_bases(self, oldtask):
        return (MultiInstanceTask, oldtask)
@ -1,35 +0,0 @@
# -*- coding: utf-8 -*-

# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import json
from ..serializer.dict import BPMNDictionarySerializer
from ...camunda.specs.UserTask import Form
from ...serializer.json import JSONSerializer

class BPMNJSONSerializer(BPMNDictionarySerializer, JSONSerializer):

    def _object_hook(self, dct):
        if '__form__' in dct:
            return Form(init=json.loads(dct['__form__']))

        return super()._object_hook(dct)

    def _default(self, obj):
        if isinstance(obj,Form):
            return {'__form__': json.dumps(obj, default=lambda o:
                                           self._jsonableHandler(o))}

        return super()._default(obj)
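# For reference, a sketch of the tagged JSON these two hooks round-trip; the
# field values are hypothetical:
#
#   _default(form)       -> {"__form__": "{\"key\": \"my_form\", \"fields\": []}"}
#   _object_hook(above)  -> Form(init={'key': 'my_form', 'fields': []})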
@ -138,9 +138,9 @@ class BpmnWorkflowSerializer:
        dct = json.loads(serialization, cls=self.json_decoder_cls)
        return dct

    def deserialize_json(self, serialization, read_only=False, use_gzip=False):
    def deserialize_json(self, serialization, use_gzip=False):
        dct = self.__get_dict(serialization, use_gzip)
        return self.workflow_from_dict(dct, read_only)
        return self.workflow_from_dict(dct)

    def get_version(self, serialization, use_gzip=False):
        try:
@ -171,11 +171,10 @@ class BpmnWorkflowSerializer:
        dct['bpmn_messages'] = [self.message_to_dict(msg) for msg in workflow.bpmn_messages]
        return dct

    def workflow_from_dict(self, dct, read_only=False):
    def workflow_from_dict(self, dct):
        """Create a workflow based on a dictionary representation.

        :param dct: the dictionary representation
        :param read_only: optionally disable modifying the workflow

        Returns:
            a BPMN Workflow object
@ -195,7 +194,7 @@ class BpmnWorkflowSerializer:
            subprocess_specs[name] = self.spec_converter.restore(wf_dct)

        # Create the top-level workflow
        workflow = self.wf_class(spec, subprocess_specs, read_only=read_only, deserializing=True)
        workflow = self.wf_class(spec, subprocess_specs, deserializing=True)

        # Restore any unretrieved messages
        workflow.bpmn_messages = [ self.message_from_dict(msg) for msg in dct.get('bpmn_messages', []) ]
@ -256,7 +255,7 @@ class BpmnWorkflowSerializer:

        if isinstance(task_spec, SubWorkflowTask) and task_id in top_dct.get('subprocesses', {}):
            subprocess_spec = top.subprocess_specs[task_spec.spec]
            subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process, read_only=top.read_only)
            subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process)
            subprocess_dct = top_dct['subprocesses'].get(task_id, {})
            subprocess.data = self.data_converter.restore(subprocess_dct.pop('data'))
            subprocess.success = subprocess_dct.pop('success')
@ -2,7 +2,6 @@ from .bpmn_converters import BpmnWorkflowSpecConverter

from ..specs.BpmnProcessSpec import BpmnProcessSpec
from ..specs.MultiInstanceTask import MultiInstanceTask, getDynamicMIClass
from ..specs.BpmnSpecMixin import BpmnSpecMixin
from ..specs.events.IntermediateEvent import _BoundaryEventParent

from ...operators import Attrib, PathAttrib
@ -167,11 +166,6 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):

        # Now we have to go back and fix all the circular references to everything
        for task_spec in spec.task_specs.values():
            if isinstance(task_spec, BpmnSpecMixin):
                for flow in task_spec.outgoing_sequence_flows.values():
                    flow.target_task_spec = spec.get_task_spec_from_name(flow.target_task_spec)
                for flow in task_spec.outgoing_sequence_flows_by_id.values():
                    flow.target_task_spec = spec.get_task_spec_from_name(flow.target_task_spec)
            if isinstance(task_spec, _BoundaryEventParent):
                task_spec.main_child_task_spec = spec.get_task_spec_from_name(task_spec.main_child_task_spec)
                task_spec.inputs = [ spec.get_task_spec_from_name(name) for name in task_spec.inputs ]
@ -57,13 +57,6 @@ class _EndJoin(UnstructuredJoin):
        super(_EndJoin, self)._on_complete_hook(my_task)
        my_task.workflow.data.update(my_task.data)

    def serialize(self, serializer):
        return serializer.serialize_join(self)

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_join(wf_spec, s_state, _EndJoin)


class BpmnDataSpecification:

@ -17,7 +17,6 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

from ...task import TaskState
from ...operators import Operator
from ...specs.base import TaskSpec

@ -33,27 +32,6 @@ class _BpmnCondition(Operator):
        return task.workflow.script_engine.evaluate(task, self.args[0])


class SequenceFlow(object):

    """
    Keeps information relating to a sequence flow
    """

    def __init__(self, id, name, documentation, target_task_spec):
        """
        Constructor.
        """
        self.id = id
        self.name = name.strip() if name else name
        self.documentation = documentation
        self.target_task_spec = target_task_spec

    def serialize(self):
        return {'id':self.id,
                'name':self.name,
                'documentation':self.documentation,
                'target_task_spec':self.target_task_spec.id}


class BpmnSpecMixin(TaskSpec):
    """
@ -69,8 +47,6 @@ class BpmnSpecMixin(TaskSpec):
        (optional).
        """
        super(BpmnSpecMixin, self).__init__(wf_spec, name, **kwargs)
        self.outgoing_sequence_flows = {}
        self.outgoing_sequence_flows_by_id = {}
        self.lane = lane
        self.position = position or {'x': 0, 'y': 0}
        self.loopTask = False
@ -88,105 +64,13 @@ class BpmnSpecMixin(TaskSpec):
"""
|
||||
return self.loopTask
|
||||
|
||||
def connect_outgoing(self, taskspec, sequence_flow_id, sequence_flow_name,
|
||||
documentation):
|
||||
"""
|
||||
Connect this task spec to the indicated child.
|
||||
|
||||
:param sequence_flow_id: The ID of the connecting sequenceFlow node.
|
||||
|
||||
:param sequence_flow_name: The name of the connecting sequenceFlow
|
||||
node.
|
||||
"""
|
||||
self.connect(taskspec)
|
||||
s = SequenceFlow(
|
||||
sequence_flow_id, sequence_flow_name, documentation, taskspec)
|
||||
self.outgoing_sequence_flows[taskspec.name] = s
|
||||
self.outgoing_sequence_flows_by_id[sequence_flow_id] = s
|
||||
|
||||
def connect_outgoing_if(self, condition, taskspec, sequence_flow_id,
|
||||
sequence_flow_name, documentation):
|
||||
def connect_outgoing_if(self, condition, taskspec):
|
||||
"""
|
||||
Connect this task spec to the indicated child, if the condition
|
||||
evaluates to true. This should only be called if the task has a
|
||||
connect_if method (e.g. ExclusiveGateway).
|
||||
|
||||
:param sequence_flow_id: The ID of the connecting sequenceFlow node.
|
||||
|
||||
:param sequence_flow_name: The name of the connecting sequenceFlow
|
||||
node.
|
||||
"""
|
||||
self.connect_if(_BpmnCondition(condition), taskspec)
|
||||
s = SequenceFlow(
|
||||
sequence_flow_id, sequence_flow_name, documentation, taskspec)
|
||||
self.outgoing_sequence_flows[taskspec.name] = s
|
||||
self.outgoing_sequence_flows_by_id[sequence_flow_id] = s
|
||||
|
||||
def get_outgoing_sequence_flow_by_spec(self, task_spec):
|
||||
"""
|
||||
Returns the outgoing SequenceFlow targeting the specified task_spec.
|
||||
"""
|
||||
return self.outgoing_sequence_flows[task_spec.name]
|
||||
|
||||
def get_outgoing_sequence_flow_by_id(self, id):
|
||||
"""
|
||||
Returns the outgoing SequenceFlow with the specified ID.
|
||||
"""
|
||||
return self.outgoing_sequence_flows_by_id[id]
|
||||
|
||||
def has_outgoing_sequence_flow(self, id):
|
||||
"""
|
||||
Returns true if the SequenceFlow with the specified ID is leaving this
|
||||
task.
|
||||
"""
|
||||
return id in self.outgoing_sequence_flows_by_id
|
||||
|
||||
def get_outgoing_sequence_names(self):
|
||||
"""
|
||||
Returns a list of the names of outgoing sequences. Some may be None.
|
||||
"""
|
||||
return sorted([s.name for s in
|
||||
list(self.outgoing_sequence_flows_by_id.values())])
|
||||
|
||||
def get_outgoing_sequences(self):
|
||||
"""
|
||||
Returns a list of outgoing sequences. Some may be None.
|
||||
"""
|
||||
return iter(list(self.outgoing_sequence_flows_by_id.values()))
|
||||
|
||||
    # Hooks for Custom BPMN tasks ##########

    def entering_waiting_state(self, my_task):
        """
        Called when a task enters the WAITING state.

        A subclass may override this method to do work when this happens.
        """
        pass

    def entering_ready_state(self, my_task):
        """
        Called when a task enters the READY state.

        A subclass may override this method to do work when this happens.
        """
        pass

    def entering_complete_state(self, my_task):
        """
        Called when a task enters the COMPLETE state.

        A subclass may override this method to do work when this happens.
        """
        pass

    def entering_cancelled_state(self, my_task):
        """
        Called when a task enters the CANCELLED state.

        A subclass may override this method to do work when this happens.
        """
        pass

    def _on_ready_hook(self, my_task):
        super()._on_ready_hook(my_task)
@ -205,26 +89,6 @@ class BpmnSpecMixin(TaskSpec):
        super(BpmnSpecMixin, self)._on_complete_hook(my_task)
        if isinstance(my_task.parent.task_spec, BpmnSpecMixin):
            my_task.parent.task_spec._child_complete_hook(my_task)
        if not my_task.workflow._is_busy_with_restore():
            self.entering_complete_state(my_task)

    def _child_complete_hook(self, child_task):
        pass

    def _on_cancel(self, my_task):
        super(BpmnSpecMixin, self)._on_cancel(my_task)
        my_task.workflow._task_cancelled_notify(my_task)
        if not my_task.workflow._is_busy_with_restore():
            self.entering_cancelled_state(my_task)

    def _update_hook(self, my_task):
        prev_state = my_task.state
        super(BpmnSpecMixin, self)._update_hook(my_task)
        if (prev_state != TaskState.WAITING and my_task.state == TaskState.WAITING and
                not my_task.workflow._is_busy_with_restore()):
            self.entering_waiting_state(my_task)

    def _on_ready_before_hook(self, my_task):
        super(BpmnSpecMixin, self)._on_ready_before_hook(my_task)
        if not my_task.workflow._is_busy_with_restore():
            self.entering_ready_state(my_task)
@ -41,22 +41,14 @@ class ExclusiveGateway(ExclusiveChoice, BpmnSpecMixin):
        # raise WorkflowException(self, 'At least one output required.')
        for condition, name in self.cond_task_specs:
            if name is None:
                raise WorkflowException(self, 'Condition with no task spec.')
                raise WorkflowException('Condition with no task spec.', task_spec=self)
            task_spec = self._wf_spec.get_task_spec_from_name(name)
            if task_spec is None:
                msg = 'Condition leads to non-existent task ' + repr(name)
                raise WorkflowException(self, msg)
                raise WorkflowException(msg, task_spec=self)
            if condition is None:
                continue

    @property
    def spec_type(self):
        return 'Exclusive Gateway'

    def serialize(self, serializer):
        return serializer.serialize_exclusive_gateway(self)


    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_exclusive_gateway(wf_spec, s_state)
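# Illustrative summary of the exception signature change applied throughout
# this commit: the message now comes first and the spec is passed as a keyword.
#
#   old: raise WorkflowException(self, 'Condition with no task spec.')
#   new: raise WorkflowException('Condition with no task spec.', task_spec=self)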
@ -117,7 +117,3 @@ class InclusiveGateway(UnstructuredJoin):
                done.add(child)
                q.append(child)
        return False

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_generic(wf_spec, s_state, InclusiveGateway)
@ -23,10 +23,6 @@ from ...specs.Simple import Simple

class ManualTask(Simple, BpmnSpecMixin):

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_generic(wf_spec, s_state, ManualTask)

    def is_engine_task(self):
        return False

@ -22,12 +22,12 @@ from builtins import range
from uuid import uuid4
import re

from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from .SubWorkflowTask import SubWorkflowTask, CallActivity
from .ParallelGateway import ParallelGateway
from .ScriptTask import ScriptTask
from .ExclusiveGateway import ExclusiveGateway
from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
from ...exceptions import WorkflowTaskException
from ...operators import valueof, is_number
from ...specs.SubWorkflow import SubWorkflow
from ...specs.base import TaskSpec
@ -396,9 +396,10 @@ class MultiInstanceTask(TaskSpec):
        # look for the variable in context; if we don't find it, default to 1
        variable = valueof(my_task, self.times, 1)
        if self.times.name == self.collection.name and type(variable) == type([]):
            raise WorkflowTaskExecException(my_task,
                'If we are updating a collection, then the collection must be a dictionary.')

            raise WorkflowTaskException(
                'If we are updating a collection, then the collection must be a dictionary.',
                task=my_task)

    def _get_current_var(self, my_task, pos):
        variable = valueof(my_task, self.times, 1)
        if is_number(variable):
@ -418,7 +419,7 @@ class MultiInstanceTask(TaskSpec):
                msg = f"There is a mismatch between runtimes and the number " \
                      f"of items in the collection, please check for an empty " \
                      f"collection {self.collection.name}."
                raise WorkflowTaskExecException(my_task, msg)
                raise WorkflowTaskException(msg, task=my_task)
            runtimesvar = keys[runtimes - 1]
        else:
            # Use an integer (for arrays)
@ -477,18 +478,6 @@ class MultiInstanceTask(TaskSpec):
        if not isinstance(my_task.task_spec,SubWorkflowTask):
            my_task._sync_children(outputs, TaskState.FUTURE)

    def serialize(self, serializer):

        return serializer.serialize_multi_instance(self)

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        prevclass = get_class(s_state['prevtaskclass'])
        spec = getDynamicMIClass(s_state['name'], prevclass)(wf_spec,s_state['name'],s_state['times'])
        spec.prevtaskclass = s_state['prevtaskclass']

        return serializer.deserialize_multi_instance(wf_spec, s_state, spec)


def getDynamicMIClass(id,prevclass):
    id = re.sub('(.+)_[0-9]$','\\1',id)
@ -29,7 +29,3 @@ class NoneTask(Simple, BpmnSpecMixin):
    @property
    def spec_type(self):
        return 'Task'

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_generic(wf_spec, s_state, NoneTask)
@ -50,7 +50,3 @@ class ParallelGateway(UnstructuredJoin):
    @property
    def spec_type(self):
        return 'Parallel Gateway'

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_generic(wf_spec, s_state, ParallelGateway)
@ -37,13 +37,6 @@ class ScriptEngineTask(Simple, BpmnSpecMixin):
            task._set_state(TaskState.WAITING)
            raise exc

    def serialize(self, serializer):
        return serializer.serialize_script_task(self)

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_script_task(wf_spec, s_state)


class ScriptTask(ScriptEngineTask):

@ -90,13 +90,6 @@ class SubWorkflowTask(BpmnSpecMixin):

        my_task._set_state(TaskState.WAITING)

    def serialize(self, serializer):
        return serializer.serialize_subworkflow_task(self)

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_subworkflow_task(wf_spec, s_state, SubWorkflowTask)

    def task_will_set_children_future(self, my_task):
        my_task.workflow.delete_subprocess(my_task)

@ -110,9 +103,6 @@ class CallActivity(SubWorkflowTask):
    def spec_type(self):
        return 'Call Activity'

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_subworkflow_task(wf_spec, s_state, CallActivity)

class TransactionSubprocess(SubWorkflowTask):

@ -123,6 +113,3 @@ class TransactionSubprocess(SubWorkflowTask):
    def spec_type(self):
        return 'Transactional Subprocess'

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_subworkflow_task(wf_spec, s_state, TransactionSubprocess)
@ -56,9 +56,8 @@ class UnstructuredJoin(Join, BpmnSpecMixin):
                    task._has_state(TaskState.WAITING) or task == my_task):
                if task.parent.task_spec in completed_inputs:
                    raise(WorkflowException
                          (task.task_spec,
                           "Unsupported looping behaviour: two threads waiting"
                           " on the same sequence flow."))
                          ("Unsupported looping behaviour: two threads waiting"
                           " on the same sequence flow.", task_spec=self))
                completed_inputs.add(task.parent.task_spec)
            else:
                waiting_tasks.append(task.parent)
@ -62,10 +62,3 @@ class EndEvent(ThrowingEvent):

        elif isinstance(self.event_definition, CancelEventDefinition):
            my_task.workflow.cancel()

    def serialize(self, serializer):
        return serializer.serialize_generic_event(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_generic_event(wf_spec, s_state, EndEvent)
@ -104,12 +104,6 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin):
            if child.task_spec == self.main_child_task_spec:
                child._set_state(state)

    def serialize(self, serializer):
        return serializer.serialize_boundary_event_parent(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_boundary_event_parent(wf_spec, s_state, cls)


class BoundaryEvent(CatchingEvent):
@ -142,13 +136,6 @@ class BoundaryEvent(CatchingEvent):
        # Notify the boundary event parent as well.
        my_task.parent.task_spec._child_complete_hook(my_task)

    def serialize(self, serializer):
        return serializer.serialize_boundary_event(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_boundary_event(wf_spec, s_state, cls)


class EventBasedGateway(CatchingEvent):

@ -40,11 +40,3 @@ class StartEvent(CatchingEvent):
            my_task._set_state(TaskState.WAITING)

        super(StartEvent, self).catch(my_task, event_definition)

    def serialize(self, serializer):
        return serializer.serialize_generic_event(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_generic_event(wf_spec, s_state, StartEvent)

@ -78,21 +78,6 @@ class EventDefinition(object):
    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__

    def serialize(self):
        return {
            'classname': self.__class__.__module__ + '.' + self.__class__.__name__,
            'internal': self.internal,
            'external': self.external,
        }

    @classmethod
    def deserialize(cls, dct):
        dct.pop('classname')
        internal, external = dct.pop('internal'), dct.pop('external')
        obj = cls(**dct)
        obj.internal, obj.external = internal, external
        return obj


class NamedEventDefinition(EventDefinition):
    """
@ -112,10 +97,6 @@ class NamedEventDefinition(EventDefinition):
    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.name == other.name

    def serialize(self):
        retdict = super(NamedEventDefinition, self).serialize()
        retdict['name'] = self.name
        return retdict

class CancelEventDefinition(EventDefinition):
    """
@ -149,10 +130,6 @@ class ErrorEventDefinition(NamedEventDefinition):
    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.error_code in [ None, other.error_code ]

    def serialize(self):
        retdict = super(ErrorEventDefinition, self).serialize()
        retdict['error_code'] = self.error_code
        return retdict

class EscalationEventDefinition(NamedEventDefinition):
    """
@ -177,11 +154,6 @@ class EscalationEventDefinition(NamedEventDefinition):
    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.escalation_code in [ None, other.escalation_code ]

    def serialize(self):
        retdict = super(EscalationEventDefinition, self).serialize()
        retdict['escalation_code'] = self.escalation_code
        return retdict


class CorrelationProperty:
    """Rules for generating a correlation key when a message is sent or received."""
@ -339,12 +311,6 @@ class TimerEventDefinition(EventDefinition):
    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label

    def serialize(self):
        retdict = super(TimerEventDefinition, self).serialize()
        retdict['label'] = self.label
        retdict['dateTime'] = self.dateTime
        return retdict


class CycleTimerEventDefinition(EventDefinition):
    """
@ -409,12 +375,6 @@ class CycleTimerEventDefinition(EventDefinition):
    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label

    def serialize(self):
        retdict = super(CycleTimerEventDefinition, self).serialize()
        retdict['label'] = self.label
        retdict['cycle_definition'] = self.cycle_definition
        return retdict


class MultipleEventDefinition(EventDefinition):

@ -72,13 +72,6 @@ class CatchingEvent(Simple, BpmnSpecMixin):
            self.event_definition.reset(my_task)
        super(CatchingEvent, self)._on_complete_hook(my_task)

    def serialize(self, serializer):
        return serializer.serialize_generic_event(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_generic_event(wf_spec, s_state, cls)


class ThrowingEvent(Simple, BpmnSpecMixin):
    """Base Task Spec for Throwing Event nodes."""
@ -95,10 +88,3 @@ class ThrowingEvent(Simple, BpmnSpecMixin):
    def _on_complete_hook(self, my_task):
        super(ThrowingEvent, self)._on_complete_hook(my_task)
        self.event_definition.throw(my_task)

    def serialize(self, serializer):
        return serializer.serialize_generic_event(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        return serializer.deserialize_generic_event(wf_spec, s_state, cls)
@ -41,22 +41,14 @@ class BpmnWorkflow(Workflow):
    Spiff Workflow class with a few extra methods and attributes.
    """

    def __init__(self, top_level_spec, subprocess_specs=None, name=None, script_engine=None,
                 read_only=False, **kwargs):
    def __init__(self, top_level_spec, subprocess_specs=None, name=None, script_engine=None, **kwargs):
        """
        Constructor.

        :param script_engine: set to an extension of PythonScriptEngine if you
        need a specialised version. Defaults to the script engine of the top
        most workflow, or to the PythonScriptEngine if none is provided.

        :param read_only: If this parameter is set then the workflow state
        cannot change. It can only be queried to find out about the current
        state. This is used in conjunction with the CompactWorkflowSerializer
        to provide read only access to a previously saved workflow.
        """
        self._busy_with_restore = False
        # THIS IS THE LINE THAT LOGS
        super(BpmnWorkflow, self).__init__(top_level_spec, **kwargs)
        self.name = name or top_level_spec.name
        self.subprocess_specs = subprocess_specs or {}
@ -64,7 +56,6 @@ class BpmnWorkflow(Workflow):
        self.bpmn_messages = []
        self.correlations = {}
        self.__script_engine = script_engine or PythonScriptEngine()
        self.read_only = read_only

    @property
    def script_engine(self):
@ -82,7 +73,6 @@ class BpmnWorkflow(Workflow):
        workflow = self._get_outermost_workflow(my_task)
        subprocess = BpmnWorkflow(
            workflow.subprocess_specs[spec_name], name=name,
            read_only=self.read_only,
            script_engine=self.script_engine,
            parent=my_task.workflow)
        workflow.subprocesses[my_task.id] = subprocess
@ -134,8 +124,6 @@ class BpmnWorkflow(Workflow):

        :param event_definition: the thrown event
        """
        assert not self.read_only and not self._is_busy_with_restore()

        # Start a subprocess for known specs with start events that catch this.
        # This is totally hypocritical of me given how I've argued that specs should
        # be immutable, but I see no other way of doing this.
@ -180,7 +168,6 @@ class BpmnWorkflow(Workflow):
        :param will_complete_task: Callback that will be called prior to completing a task
        :param did_complete_task: Callback that will be called after completing a task
        """
        assert not self.read_only
        engine_steps = list(
            [t for t in self.get_tasks(TaskState.READY)
             if self._is_engine_task(t.task_spec)])
@ -207,7 +194,6 @@ class BpmnWorkflow(Workflow):
        :param will_refresh_task: Callback that will be called prior to refreshing a task
        :param did_refresh_task: Callback that will be called after refreshing a task
        """
        assert not self.read_only
        for my_task in self.get_tasks(TaskState.WAITING):
            if will_refresh_task is not None:
                will_refresh_task(my_task)
@ -232,12 +218,11 @@ class BpmnWorkflow(Workflow):

    def _find_task(self, task_id):
        if task_id is None:
            raise WorkflowException(self.spec, 'task_id is None')
            raise WorkflowException('task_id is None', task_spec=self.spec)
        for task in self.get_tasks():
            if task.id == task_id:
                return task
        raise WorkflowException(self.spec,
                                f'A task with the given task_id ({task_id}) was not found')
        raise WorkflowException(f'A task with the given task_id ({task_id}) was not found', task_spec=self.spec)

    def complete_task_from_id(self, task_id):
        # I don't even know why we use this stupid function instead of calling task.complete,
@ -252,9 +237,7 @@ class BpmnWorkflow(Workflow):
        return task.reset_token(data)

    def get_ready_user_tasks(self,lane=None):
        """
        Returns a list of User Tasks that are READY for user action
        """
        """Returns a list of User Tasks that are READY for user action"""
        if lane is not None:
            return [t for t in self.get_tasks(TaskState.READY)
                    if (not self._is_engine_task(t.task_spec))
@ -264,26 +247,14 @@ class BpmnWorkflow(Workflow):
                    if not self._is_engine_task(t.task_spec)]

    def get_waiting_tasks(self):
        """
        Returns a list of all WAITING tasks
        """
        """Returns a list of all WAITING tasks"""
        return self.get_tasks(TaskState.WAITING)

    def get_catching_tasks(self):
        return [ task for task in self.get_tasks() if isinstance(task.task_spec, CatchingEvent) ]

    def _is_busy_with_restore(self):
        if self.outer_workflow == self:
            return self._busy_with_restore
        return self.outer_workflow._is_busy_with_restore()

    def _is_engine_task(self, task_spec):
        return (not hasattr(task_spec, 'is_engine_task') or
                task_spec.is_engine_task())
        return (not hasattr(task_spec, 'is_engine_task') or task_spec.is_engine_task())

    def _task_completed_notify(self, task):
        assert (not self.read_only) or self._is_busy_with_restore()
        super(BpmnWorkflow, self)._task_completed_notify(task)

    def _task_cancelled_notify(self, task):
        assert (not self.read_only) or self._is_busy_with_restore()
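# A minimal construction sketch for the new __init__ signature above; the
# spec value and the engine subclass are hypothetical stand-ins:
#
#   class MyScriptEngine(PythonScriptEngine):
#       pass  # override evaluate()/execute() as needed
#
#   workflow = BpmnWorkflow(top_level_spec, script_engine=MyScriptEngine())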
@ -44,7 +44,7 @@ class UserTaskParser(TaskParser):
    """
    Base class for parsing User Tasks
    """


    def __init__(self, process_parser, spec_class, node, lane=None):
        nsmap = DEFAULT_NSMAP.copy()
        nsmap.update({'camunda': CAMUNDA_MODEL_NS})
@ -63,7 +63,7 @@ class UserTaskParser(TaskParser):
        form = Form()
        try:
            form.key = self.node.attrib['{' + CAMUNDA_MODEL_NS + '}formKey']
        except (KeyError):
        except KeyError:
            return form
        for xml_field in self.xpath('.//camunda:formData/camunda:formField'):
            if xml_field.get('type') == 'enum':
@ -6,6 +6,7 @@ from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin


class UserTask(UserTask, BpmnSpecMixin):
    """Task Spec for a bpmn:userTask node with Camunda forms."""

    def __init__(self, wf_spec, name, form, **kwargs):
        """
@ -16,24 +17,12 @@ class UserTask(UserTask, BpmnSpecMixin):
        super(UserTask, self).__init__(wf_spec, name, **kwargs)
        self.form = form


    """
    Task Spec for a bpmn:userTask node.
    """

    def _on_trigger(self, my_task):
        pass

    def is_engine_task(self):
        return False

    def serialize(self, serializer):
        return serializer.serialize_user_task(self)

    @classmethod
    def deserialize(self, serializer, wf_spec, s_state):
        return serializer.deserialize_user_task(wf_spec, s_state)


class FormField(object):
    def __init__(self, form_type="text"):
@ -66,8 +55,6 @@ class FormField(object):
    def has_validation(self, name):
        return self.get_validation(name) is not None

    def jsonable(self):
        return self.__dict__

class EnumFormField(FormField):
    def __init__(self):
@ -77,17 +64,12 @@ class EnumFormField(FormField):
    def add_option(self, option_id, name):
        self.options.append(EnumFormFieldOption(option_id, name))

    def jsonable(self):
        return self.__dict__


class EnumFormFieldOption:
    def __init__(self, option_id, name):
        self.id = option_id
        self.name = name

    def jsonable(self):
        return self.__dict__


class FormFieldProperty:
@ -95,18 +77,12 @@ class FormFieldProperty:
        self.id = property_id
        self.value = value

    def jsonable(self):
        return self.__dict__


class FormFieldValidation:
    def __init__(self, name, config):
        self.name = name
        self.config = config

    def jsonable(self):
        return self.__dict__


class Form:
    def __init__(self,init=None):
@ -118,9 +94,6 @@ class Form:
    def add_field(self, field):
        self.fields.append(field)

    def jsonable(self):
        return self.__dict__

    def from_dict(self,formdict):
        self.key = formdict['key']
        for field in formdict['fields']:
@ -46,9 +46,3 @@ class MessageEventDefinition(MessageEventDefinition):
def reset(self, my_task):
my_task.internal_data.pop('result_var', None)
super(MessageEventDefinition, self).reset(my_task)

def serialize(self):
retdict = super().serialize()
retdict['payload'] = self.payload
retdict['result_var'] = self.result_var
return retdict
@ -2,6 +2,7 @@ import logging
import re

from ..specs.model import HitPolicy
from ...exceptions import SpiffWorkflowException, WorkflowTaskException
from ...util import levenshtein
from ...workflow import WorkflowException

@ -54,18 +55,13 @@ class DMNEngine:
try:
if not self.evaluate(input_val, lhs, task):
return False
except NameError as e:
# Add a bit of info, re-raise as Name Error
raise NameError(str(e) + "Failed to execute "
"expression: '%s' is '%s' in the "
"Row with annotation '%s'")
except WorkflowException as we:
raise we
except SpiffWorkflowException as se:
se.add_note(f"Rule failed on row {rule.row_number}")
raise se
except Exception as e:
raise Exception("Failed to execute "
"expression: '%s' is '%s' in the "
"Row with annotation '%s', %s" % (
input_val, lhs, rule.description, str(e)))
error = WorkflowTaskException(str(e), task=task, exception=e)
error.add_note(f"Failed to execute DMN Rule on row {rule.row_number}")
raise error
else:
# Empty means ignore decision value
continue # Check the other operators/columns
@ -111,10 +107,7 @@ class DMNEngine:
external_methods=external_methods)

# The input expression just has to be something that can be parsed as is by the engine.
try:
script_engine.validate(input_expr)
except Exception as e:
raise WorkflowException(f"Input Expression '{input_expr}' is malformed. " + str(e))
script_engine.validate(input_expr)

# If we get here, we need to check whether the match expression includes
# an operator or if can use '=='

@ -32,7 +32,7 @@ class BpmnDmnParser(BpmnParser):
options = ', '.join(list(self.dmn_parsers.keys()))
raise ValidationException(
'No DMN Diagram available with id "%s", Available DMN ids are: %s' %(decision_ref, options),
node=node, filename='')
node=node, file_name='')
dmn_parser = self.dmn_parsers[decision_ref]
dmn_parser.parse()
decision = dmn_parser.decision

@ -1,6 +1,7 @@
import ast

from SpiffWorkflow.bpmn.parser.node_parser import NodeParser, DEFAULT_NSMAP
from ...bpmn.parser.ValidationException import ValidationException

from ...bpmn.parser.util import xpath_eval

@ -69,25 +70,20 @@ class DMNParser(NodeParser):
def _parse_decision(self, root):
decision_elements = list(root)
if len(decision_elements) == 0:
raise Exception('No decisions found')
raise ValidationException('No decisions found', file_name=self.filename,
node=root)

if len(decision_elements) > 1:
raise Exception('Multiple decisions found')
raise ValidationException('Multiple decision tables are not currently supported.',
file_name=self.filename, node=root)

decision_element = decision_elements[0]
assert decision_element.tag.endswith(
'decision'), 'Element %r is not of type "decision"' % (
decision_element.tag)

decision = Decision(decision_element.attrib['id'],
decision_element.attrib.get('name', ''))

# Parse decision tables
try:
self._parse_decision_tables(decision, decision_element)
except Exception as e:
raise Exception(
"Error in Decision '%s': %s" % (decision.name, str(e)))
self._parse_decision_tables(decision, decision_element)

return decision

@ -104,6 +100,7 @@ class DMNParser(NodeParser):

def _parse_inputs_outputs(self, decisionTable,
decisionTableElement):
rule_counter = 0
for element in decisionTableElement:
if element.tag.endswith('input'):
e_input = self._parse_input(element)
@ -112,11 +109,13 @@ class DMNParser(NodeParser):
output = self._parse_output(element)
decisionTable.outputs.append(output)
elif element.tag.endswith('rule'):
rule = self._parse_rule(decisionTable, element)
rule_counter += 1
rule = self._parse_rule(decisionTable, element, rule_counter)
decisionTable.rules.append(rule)
else:
raise Exception(
'Unknown type in decision table: %r' % element.tag)
raise ValidationException(
'Unknown type in decision table: %r' % element.tag,
node=element, file_name=self.filename)

def _parse_input(self, input_element):
type_ref = None
@ -142,9 +141,9 @@ class DMNParser(NodeParser):
outputElement.attrib.get('typeRef', ''))
return output

def _parse_rule(self, decisionTable, ruleElement):
def _parse_rule(self, decisionTable, ruleElement, rowNumber):
rule = Rule(ruleElement.attrib['id'])

rule.row_number = rowNumber
input_idx = 0
output_idx = 0
for child in ruleElement:
@ -189,6 +188,7 @@ class DMNParser(NodeParser):
try:
ast.parse(entry.text)
except Exception as e:
raise Exception(
"Malformed Output Expression '%s'. %s " % (entry.text, str(e)))
raise ValidationException(
"Malformed Output Expression '%s'. %s " % (entry.text, str(e)),
node=element, file_name=self.filename)
return entry

@ -47,6 +47,7 @@ class BusinessRuleTaskConverter(BpmnTaskSpecConverter):
def rule_to_dict(self, rule):
return {
'id': rule.id,
'row_number': rule.row_number,
'description': rule.description,
'input_entries': [self.input_entry_to_dict(entry) for entry in rule.inputEntries],
'output_entries': [self.output_entry_to_dict(entry) for entry in rule.outputEntries],
@ -91,6 +92,7 @@ class BusinessRuleTaskConverter(BpmnTaskSpecConverter):
def rule_from_dict(self, dct, inputs, outputs):
rule = Rule(dct['id'])
rule.description = dct['description']
rule.row_number = dct.get('row_number', 0)
rule.inputEntries = [self.input_entry_from_dict(entry, inputs)
for entry in dct['input_entries']]
rule.outputEntries = [self.output_entry_from_dict(entry, outputs)

@ -1,4 +1,5 @@
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from SpiffWorkflow.exceptions import WorkflowTaskException, WorkflowException, \
SpiffWorkflowException

from ...specs.Simple import Simple

@ -29,14 +30,11 @@ class BusinessRuleTask(Simple, BpmnSpecMixin):
my_task.data = DeepMerge.merge(my_task.data,
self.dmnEngine.result(my_task))
super(BusinessRuleTask, self)._on_complete_hook(my_task)
except SpiffWorkflowException as we:
we.add_note(f"Business Rule Task '{my_task.task_spec.description}'.")
raise we
except Exception as e:
raise WorkflowTaskExecException(my_task, str(e))

def serialize(self, serializer):
return serializer.serialize_business_rule_task(self)

@classmethod
def deserialize(self, serializer, wf_spec, s_state):
return serializer.deserialize_business_rule_task(wf_spec, s_state)

error = WorkflowTaskException(str(e), task=my_task)
error.add_note(f"Business Rule Task '{my_task.task_spec.description}'.")
raise error


@ -36,31 +36,6 @@ class DecisionTable:
self.outputs = []
self.rules = []

def serialize(self):
out = {}
out['id'] = self.id
out['name'] = self.name
out['hit_policy'] = self.hit_policy
out['inputs'] = [x.serialize() for x in self.inputs]
out['outputs'] = [x.serialize() for x in self.outputs]
out['rules'] = [x.serialize() for x in self.rules]
return out

def deserialize(self,indict):
self.id = indict['id']
self.name = indict['name']
if 'hit_policy' in indict:
self.hit_policy = indict['hit_policy']
else:
self.hit_policy = HitPolicy.UNIQUE.value
self.inputs = [Input(**x) for x in indict['inputs']]
list(map(lambda x, y: x.deserialize(y), self.inputs, indict['inputs']))
self.outputs = [Output(**x) for x in indict['outputs']]
self.rules = [Rule(None) for x in indict['rules']]
list(map(lambda x, y: x.deserialize(y),self.rules,indict['rules']))




class Input:
def __init__(self, id, label, name, expression, typeRef):
@ -70,20 +45,6 @@ class Input:
self.expression = expression
self.typeRef = typeRef

def serialize(self):
out = {}
out['id'] = self.id
out['label'] = self.label
out['name'] = self.name
out['expression'] = self.expression
out['typeRef'] = self.typeRef
return out

def deserialize(self,indict):
pass




class InputEntry:
def __init__(self, id, input):
@ -93,20 +54,6 @@ class InputEntry:
self.description = ''
self.lhs = []

def serialize(self):
out = {}
out['id'] = self.id
out['input'] = self.input.serialize()
out['description'] = self.description
out['lhs'] = self.lhs
return out

def deserialize(self, indict):
self.id = indict['id']
self.description = indict['description']
self.lhs = indict['lhs']
self.input = Input(**indict['input'])
self.input.deserialize(indict['input'])

class Output:
def __init__(self, id, label, name, typeRef):
@ -115,14 +62,6 @@ class Output:
self.name = name
self.typeRef = typeRef

def serialize(self):
out = {}
out['id'] = self.id
out['label'] = self.label
out['name'] = self.name
out['typeRef'] = self.typeRef
return out


class OutputEntry:
def __init__(self, id, output):
@ -132,45 +71,15 @@ class OutputEntry:
self.description = ''
self.text = ''

def serialize(self):
out = {}
out['id'] = self.id
out['output'] = self.output.serialize()
out['description'] = self.description
out['text'] = self.text
return out

def deserialize(self, indict):
self.id = indict['id']
self.description = indict['description']
self.text = indict['text']
self.output = Output(**indict['output'])



class Rule:
def __init__(self, id):
self.id = id

self.row_number = 0
self.description = ''
self.inputEntries = []
self.outputEntries = []

def serialize(self):
out = {}
out['id'] = self.id
out['description'] = self.description
out['inputEntries'] = [x.serialize() for x in self.inputEntries]
out['outputEntries'] = [x.serialize() for x in self.outputEntries]
return out

def deserialize(self,indict):
self.id = indict['id']
self.description = indict['description']
self.inputEntries = [InputEntry(None,None) for x in indict['inputEntries']]
list(map(lambda x,y : x.deserialize(y), self.inputEntries, indict['inputEntries']))
self.outputEntries = [OutputEntry(None, None) for x in indict['outputEntries']]
list(map(lambda x, y: x.deserialize(y), self.outputEntries, indict['outputEntries']))

def output_as_dict(self, task):
script_engine = task.workflow.script_engine

@ -15,25 +15,45 @@
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import re

from SpiffWorkflow.util import levenshtein


class WorkflowException(Exception):
class SpiffWorkflowException(Exception):
"""
Base class for all SpiffWorkflow-generated exceptions.
"""
def __init__(self, msg):
super().__init__(msg)
self.notes = []

def add_note(self, note):
"""add_note is a python 3.11 feature, this can be removed when we
stop supporting versions prior to 3.11"""
self.notes.append(note)

def __str__(self):
return super().__str__() + ". " + ". ".join(self.notes)


class WorkflowException(SpiffWorkflowException):
"""
Base class for all SpiffWorkflow-generated exceptions.
"""

def __init__(self, sender, error):
def __init__(self, message, task_spec=None):
"""
Standard exception class.

:param sender: the task spec that threw the exception
:type sender: TaskSpec
:param error: a human readable error message
:param task_spec: the task spec that threw the exception
:type task_spec: TaskSpec
:param error: a human-readable error message
:type error: string
"""
Exception.__init__(self, str(error))
super().__init__(str(message))
# Points to the TaskSpec that generated the exception.
self.sender = sender
self.task_spec = task_spec

@staticmethod
def get_task_trace(task):
@ -45,10 +65,31 @@ class WorkflowException(Exception):
task_trace.append(f"{workflow.spec.task_specs[caller].description} ({workflow.spec.file})")
return task_trace

@staticmethod
def did_you_mean_from_name_error(name_exception, options):
"""Returns a string along the lines of 'did you mean 'dog'? Given
a name_error, and a set of possible things that could have been called,
or an empty string if no valid suggestions come up. """
if isinstance(name_exception, NameError):
def_match = re.match("name '(.+)' is not defined", str(name_exception))
if def_match:
bad_variable = re.match("name '(.+)' is not defined",
str(name_exception)).group(1)
most_similar = levenshtein.most_similar(bad_variable,
options, 3)
error_msg = ""
if len(most_similar) == 1:
error_msg += f' Did you mean \'{most_similar[0]}\'?'
if len(most_similar) > 1:
error_msg += f' Did you mean one of \'{most_similar}\'?'
return error_msg


class WorkflowTaskException(WorkflowException):
"""WorkflowException that provides task_trace information."""

def __init__(self, task, error_msg, exception=None):
def __init__(self, error_msg, task=None, exception=None,
line_number=None, offset=None, error_line=None):
"""
Exception initialization.

@ -60,17 +101,31 @@ class WorkflowTaskException(WorkflowException):
:type exception: Exception
"""

self.exception = exception
self.task = task
self.line_number = line_number
self.offset = offset
self.error_line = error_line
if exception:
self.error_type = exception.__class__.__name__
else:
self.error_type = "unknown"
super().__init__(error_msg, task_spec=task.task_spec)

# If encountered in a sub-workflow, this traces back up the stack
# so we can tell how we got to this paticular task, no matter how
if isinstance(exception, SyntaxError) and not line_number:
# Line number and offset can be recovered directly from syntax errors,
# otherwise they must be passed in.
self.line_number = exception.lineno
self.offset = exception.offset
elif isinstance(exception, NameError):
self.add_note(self.did_you_mean_from_name_error(exception, list(task.data.keys())))

# If encountered in a sub-workflow, this traces back up the stack,
# so we can tell how we got to this particular task, no matter how
# deeply nested in sub-workflows it is. Takes the form of:
# task-description (file-name)
self.task_trace = self.get_task_trace(task)

super().__init__(task.task_spec, error_msg)


class StorageException(Exception):
class StorageException(SpiffWorkflowException):
pass

@ -225,7 +225,7 @@ class Operator(Term):
return values

def _matches(self, task):
raise Exception("Abstract class, do not call")
raise NotImplementedError("Abstract class, do not call")

def serialize(self, serializer):
"""

@ -586,29 +586,17 @@ class DictionarySerializer(Serializer):
assert isinstance(workflow, Workflow)
s_state = dict()
if include_spec:
s_state['wf_spec'] = self.serialize_workflow_spec(workflow.spec,
**kwargs)
s_state['wf_spec'] = self.serialize_workflow_spec(workflow.spec, **kwargs)

# data
s_state['data'] = self.serialize_dict(workflow.data)

# last_node
value = workflow.last_task
s_state['last_task'] = value.id if value is not None else None

# outer_workflow
# s_state['outer_workflow'] = workflow.outer_workflow.id

# success
s_state['success'] = workflow.success

# task_tree
s_state['task_tree'] = self.serialize_task(workflow.task_tree)

return s_state

def deserialize_workflow(self, s_state, wf_class=Workflow,
read_only=False, wf_spec=None, **kwargs):
def deserialize_workflow(self, s_state, wf_class=Workflow, wf_spec=None, **kwargs):
"""It is possible to override the workflow class, and specify a
workflow_spec, otherwise the spec is assumed to be serialized in the
s_state['wf_spec']"""
@ -616,23 +604,9 @@ class DictionarySerializer(Serializer):
if wf_spec is None:
wf_spec = self.deserialize_workflow_spec(s_state['wf_spec'], **kwargs)
workflow = wf_class(wf_spec)

workflow.read_only = read_only

# data
workflow.data = self.deserialize_dict(s_state['data'])

# outer_workflow
# workflow.outer_workflow =
# find_workflow_by_id(remap_workflow_id(s_state['outer_workflow']))

# success
workflow.success = s_state['success']

# workflow
workflow.spec = wf_spec

# task_tree
workflow.task_tree = self.deserialize_task(
workflow, s_state['task_tree'])

@ -641,15 +615,11 @@ class DictionarySerializer(Serializer):
for task in tasklist:
task.parent = workflow.get_task(task.parent,tasklist)

# last_task
workflow.last_task = workflow.get_task(s_state['last_task'],tasklist)

# task_mapping
workflow.update_task_mapping()

return workflow


def serialize_task(self, task, skip_children=False, allow_subs=False):
"""
:param allow_subs: Allows sub-serialization to take place, otherwise

@ -53,14 +53,6 @@ class JSONSerializer(DictionarySerializer):

return dct

def _jsonableHandler(self, obj):
if hasattr(obj, 'jsonable'):
return obj.jsonable()

raise 'Object of type %s with value of %s is not JSON serializable' % (
type(obj), repr(obj))


def _default(self, obj):
if isinstance(obj, uuid.UUID):
return {'__uuid__': obj.hex}

@ -16,5 +16,5 @@ class SignavioBpmnParser(BpmnParser):
'Intermediate Catch Event has no incoming sequences. '
'This might be a Boundary Event that has been '
'disconnected.',
node=catch_event, filename=filename)
return super().add_bpmn_xml(bpmn, filename)
node=catch_event, file_name=filename)
return super().add_bpmn_xml(bpmn, filename)

@ -24,7 +24,7 @@ class CallActivityParser(TaskParser):
f"The process '{called_element}' was not found. Did you mean one of the following: "
f"{', '.join(self.process_parser.parser.get_process_ids())}?",
node=self.node,
filename=self.process_parser.filename)
file_name=self.process_parser.filename)
return parser.get_id()

def _fix_call_activities(self):
@ -41,5 +41,5 @@ class CallActivityParser(TaskParser):
if not signavio_meta_data:
raise ValidationException(
'No Signavio "Subprocess reference" specified.',
node=self.node, filename=self.filename)
node=self.node, file_name=self.filename)
return one(signavio_meta_data).get('metaValue')

@ -53,7 +53,7 @@ class Cancel(TaskSpec):
"""
TaskSpec.test(self)
if len(self.outputs) > 0:
raise WorkflowException(self, 'Cancel with an output.')
raise WorkflowException('Cancel with an output.', task_spec=self)

def _on_complete_hook(self, my_task):
my_task.workflow.cancel(self.cancel_successfully)

@ -64,7 +64,7 @@ class ExclusiveChoice(MultiChoice):
"""
MultiChoice.test(self)
if self.default_task_spec is None:
raise WorkflowException(self, 'A default output is required.')
raise WorkflowException('A default output is required.', task_spec=self)

def _predict_hook(self, my_task):
# If the task's status is not predicted, we default to MAYBE
@ -84,8 +84,7 @@ class ExclusiveChoice(MultiChoice):
break

if output is None:
raise WorkflowException(self,
f'No conditions satisfied for {my_task.task_spec.name}')
raise WorkflowException(f'No conditions satisfied for {my_task.task_spec.name}', task_spec=self)

my_task._sync_children([output], TaskState.FUTURE)


@ -154,7 +154,7 @@ class Join(TaskSpec):
split_task = my_task._find_ancestor_from_name(self.split_task)
if split_task is None:
msg = 'Join with %s, which was not reached' % self.split_task
raise WorkflowException(self, msg)
raise WorkflowException(msg, task_spec=self)
tasks = split_task.task_spec._get_activated_tasks(split_task, my_task)

# The default threshold is the number of branches that were started.

@ -57,10 +57,11 @@ class LoopResetTask(TaskSpec):
# maybe upstream someone will be able to handle this situation
task._set_state(TaskState.WAITING)
if isinstance(e, WorkflowTaskException):
e.add_note('Error occurred during a loop back to a previous step.')
raise e
else:
raise WorkflowTaskException(
task, 'Error during loop back:' + str(e), e)
'Error during loop back:' + str(e), task=task, exception=e)
super(LoopResetTask, self)._on_complete_hook(task)

def serialize(self, serializer):

@ -70,14 +70,14 @@ class MultiChoice(TaskSpec):
"""
TaskSpec.test(self)
if len(self.cond_task_specs) < 1:
raise WorkflowException(self, 'At least one output required.')
raise WorkflowException('At least one output required.', task_spec=self)
for condition, name in self.cond_task_specs:
if name is None:
raise WorkflowException(self, 'Condition with no task spec.')
raise WorkflowException('Condition with no task spec.', task_spec=self)
task_spec = self._wf_spec.get_task_spec_from_name(name)
if task_spec is None:
msg = 'Condition leads to non-existent task ' + repr(name)
raise WorkflowException(self, msg)
raise WorkflowException(msg, task_spec=self)
if condition is None:
continue


@ -44,7 +44,7 @@ class StartTask(TaskSpec):
"""
Called by the previous task to let us know that it exists.
"""
raise WorkflowException(self, 'StartTask can not have any inputs.')
raise WorkflowException('StartTask can not have any inputs.', task_spec=self)

def test(self):
"""
@ -52,9 +52,9 @@ class StartTask(TaskSpec):
if an error was detected.
"""
if len(self.inputs) != 0:
raise WorkflowException(self, 'StartTask with an input.')
raise WorkflowException('StartTask with an input.', task_spec=self)
elif len(self.outputs) < 1:
raise WorkflowException(self, 'No output task connected.')
raise WorkflowException('No output task connected.', task_spec=self)

def serialize(self, serializer):
return serializer.serialize_start_task(self)

@ -72,8 +72,7 @@ class SubWorkflow(TaskSpec):
def test(self):
TaskSpec.test(self)
if self.file is not None and not os.path.exists(self.file):
raise WorkflowException(
self, 'File does not exist: %s' % self.file)
raise WorkflowException('File does not exist: %s' % self.file, task_spec=self)

def _predict_hook(self, my_task):
# Modifying the task spec is a TERRIBLE idea, but if we don't do it, sync_children won't work

@ -63,7 +63,7 @@ class ThreadMerge(Join):
split_task = my_task._find_ancestor_from_name(self.split_task)
if split_task is None:
msg = 'Join with %s, which was not reached' % self.split_task
raise WorkflowException(self, msg)
raise WorkflowException(msg, task_spec=self)
tasks = split_task.task_spec._get_activated_threads(split_task)

# The default threshold is the number of threads that were started.

@ -152,33 +152,35 @@ class WorkflowSpec(object):
def recursive_dump(task_spec, indent):
if task_spec in done:
return '[shown earlier] %s (%s:%s)' % (
task_spec.name, task_spec.__class__.__name__,
hex(id(task_spec))) + '\n'

task_spec.name,
task_spec.__class__.__name__,
hex(id(task_spec))
) + '\n'
done.add(task_spec)
dump = '%s (%s:%s)' % (
task_spec.name,
task_spec.__class__.__name__, hex(id(task_spec))) + '\n'
task_spec.__class__.__name__,
hex(id(task_spec))
) + '\n'
if verbose:
if task_spec.inputs:
dump += indent + '- IN: ' + \
','.join(['%s (%s)' % (t.name, hex(id(t)))
for t in task_spec.inputs]) + '\n'
dump += indent + \
'- IN: ' + \
','.join(['%s (%s)' % (t.name, hex(id(t))) for t in task_spec.inputs]) + \
'\n'
if task_spec.outputs:
dump += indent + '- OUT: ' + \
','.join(['%s (%s)' % (t.name, hex(id(t)))
for t in task_spec.outputs]) + '\n'
sub_specs = ([task_spec.spec.start] if hasattr(
task_spec, 'spec') else []) + task_spec.outputs
for i, t in enumerate(sub_specs):
dump += indent + ' --> ' + \
recursive_dump(
t, indent + (' | ' if i + 1 < len(sub_specs) else
' '))
dump += indent + \
'- OUT: ' + \
','.join(['%s (%s)' % (t.name, hex(id(t))) for t in task_spec.outputs]) + \
'\n'
# sub_specs = ([task_spec.spec.start] if hasattr(task_spec, 'spec') else []) + task_spec.outputs
for i, t in enumerate(task_spec.outputs):
dump += indent + \
' --> ' + \
recursive_dump(t, indent + (' | ' if i + 1 < len(task_spec.outputs) else ' '))
return dump

dump = recursive_dump(self.start, '')

return dump

def dump(self):

@ -1,5 +1,6 @@
from copy import deepcopy

from SpiffWorkflow.exceptions import SpiffWorkflowException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.specs.BpmnSpecMixin import BpmnSpecMixin

@ -36,9 +37,17 @@ class SpiffBpmnTask(BpmnSpecMixin):
def _on_ready_hook(self, my_task):
super()._on_ready_hook(my_task)
if self.prescript is not None:
self.execute_script(my_task, self.prescript)
try:
self.execute_script(my_task, self.prescript)
except SpiffWorkflowException as se:
se.add_note("Error occurred in the Pre-Script")
raise se

def _on_complete_hook(self, my_task):
if self.postscript is not None:
self.execute_script(my_task, self.postscript)
super()._on_complete_hook(my_task)
try:
self.execute_script(my_task, self.postscript)
except SpiffWorkflowException as se:
se.add_note("Error occurred in the Post-Script")
raise se
super()._on_complete_hook(my_task)

@ -182,8 +182,7 @@ class Task(object, metaclass=DeprecatedMetaTask):
# Assure we don't recurse forever.
self.count += 1
if self.count > self.MAX_ITERATIONS:
raise WorkflowException(current,
"Task Iterator entered infinite recursion loop" )
raise WorkflowException("Task Iterator entered infinite recursion loop", task_spec=current)


# If the current task has children, the first child is the next
@ -266,8 +265,8 @@ class Task(object, metaclass=DeprecatedMetaTask):
def state(self, value):
if value < self._state:
raise WorkflowException(
self.task_spec,
'state went from %s to %s!' % (self.get_state_name(), TaskStateNames[value])
'state went from %s to %s!' % (self.get_state_name(), TaskStateNames[value]),
task_spec=self.task_spec
)
self._set_state(value)

@ -345,8 +344,8 @@ class Task(object, metaclass=DeprecatedMetaTask):
if self.is_looping():
self.terminate_current_loop = True
else:
raise WorkflowException(self.task_spec,
'The method terminate_loop should only be called in the case of a BPMN Loop Task')
raise WorkflowException('The method terminate_loop should only be called in the case of a BPMN Loop Task',
task_spec=self)

def is_looping(self):
"""Returns true if this is a looping task."""
@ -475,7 +474,7 @@ class Task(object, metaclass=DeprecatedMetaTask):
raise ValueError(self, '_add_child() requires a TaskSpec')
if self._is_predicted() and state & TaskState.PREDICTED_MASK == 0:
msg = 'Attempt to add non-predicted child to predicted task'
raise WorkflowException(self.task_spec, msg)
raise WorkflowException(msg, task_spec=self.task_spec)
task = Task(self.workflow, task_spec, self, state=state)
task.thread_id = self.thread_id
if state == TaskState.READY:
@ -551,7 +550,7 @@ class Task(object, metaclass=DeprecatedMetaTask):

# Definite tasks must not be removed, so they HAVE to be in the given task spec list.
if child._is_definite():
raise WorkflowException(self.task_spec, f'removal of non-predicted child {child}')
raise WorkflowException(f'removal of non-predicted child {child}', task_spec=self.task_spec)
unneeded_children.append(child)

# Remove and add the children accordingly.

@ -257,7 +257,7 @@ class Workflow(object):
:param task_id: The id of the Task object.
"""
if task_id is None:
raise WorkflowException(self.spec, 'task_id is None')
raise WorkflowException('task_id is None', task_spec=self.spec)
data = {}
if self.last_task and self.last_task.data:
data = self.last_task.data
@ -265,7 +265,7 @@ class Workflow(object):
if task.id == task_id:
return task.reset_token(data)
msg = 'A task with the given task_id (%s) was not found' % task_id
raise WorkflowException(self.spec, msg)
raise WorkflowException(msg, task_spec=self.spec)

def get_reset_task_spec(self, destination):
"""
@ -300,12 +300,12 @@ class Workflow(object):
:param task_id: The id of the Task object.
"""
if task_id is None:
raise WorkflowException(self.spec, 'task_id is None')
raise WorkflowException('task_id is None', task_spec=self.spec)
for task in self.task_tree:
if task.id == task_id:
return task.complete()
msg = 'A task with the given task_id (%s) was not found' % task_id
raise WorkflowException(self.spec, msg)
raise WorkflowException(msg, task_spec=self.spec)

def complete_next(self, pick_up=True, halt_on_manual=True):
"""

90
SpiffWorkflow/doc/errors.rst
Normal file
@ -0,0 +1,90 @@
SpiffWorkflow Exceptions
====================================
Details about the exceptions and the exception hierarchy within SpiffWorkflow.

SpiffWorkflowException
----------------------
Base exception for all exceptions raised by SpiffWorkflow.

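A minimal sketch of how the note mechanism behaves, based on the implementation
added in this commit (the message text here is hypothetical)::

    from SpiffWorkflow.exceptions import SpiffWorkflowException

    try:
        raise SpiffWorkflowException("Unable to run task")
    except SpiffWorkflowException as exc:
        # Notes accumulate and are appended to str(exc), separated by ". "
        exc.add_note("Error occurred in the Pre-Script")
        print(exc)   # Unable to run task. Error occurred in the Pre-Script
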
ValidationException
-------------------

**Extends**
SpiffWorkflowException

Thrown during the parsing of a workflow.

**Attributes/Methods**

- **tag**: the type of xml tag being parsed.
- **id**: the id attribute of the xml tag, if available.
- **name**: the name attribute of the xml tag, if available.
- **line_number**: the line number where the tag occurs.
- **file_name**: the name of the file where the error occurred.
- **message**: a human-readable error message.

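A hedged sketch of catching a ValidationException during parsing; the parser
class usage, file name, and process id below are assumptions for illustration::

    from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
    from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException

    parser = BpmnParser()
    try:
        parser.add_bpmn_file('diagram.bpmn')      # hypothetical file
        spec = parser.get_spec('my_process')      # hypothetical process id
    except ValidationException as exc:
        # file_name and line_number point back into the offending XML
        print(exc.file_name, exc.line_number, str(exc))
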

WorkflowException
-----------------
Raised when an error occurs with a Task Specification (arguably it should have
been called a SpecException).

**Extends**
SpiffWorkflowException

**Attributes/Methods**

- **sender**: the TaskSpec - the specific task, gateway, etc. that caused the error to happen.
- **error**: a human-readable error message describing the problem.
- **get_task_trace**: given a specific Task, works its way up through the workflow, sub-processes,
  and call activities to show where an error occurred. Useful if the error happened within a deeply nested structure (where call activities include call activities...).

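A small sketch of the did-you-mean helper this commit adds for NameErrors; the
variable and option names are hypothetical::

    from SpiffWorkflow.exceptions import WorkflowException

    try:
        eval("custmer_id + 1", {}, {})            # raises NameError
    except NameError as exc:
        hint = WorkflowException.did_you_mean_from_name_error(
            exc, ['customer_id', 'order_id'])
        print(hint)   # a suggestion such as: Did you mean 'customer_id'?
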

WorkflowDataException
---------------------
Raised when an exception occurs while moving data between tasks and Data Objects
(including data inputs and data outputs).

**Extends**
WorkflowException

**Attributes/Methods**

(in addition to the values in a WorkflowException)

- **task**: the specific task (not the task spec, but the actual executing task)
- **data_input**: the spec of the input variable
- **data_output**: the spec of the output variable

WorkflowTaskException
---------------------
**Extends**
WorkflowException

**Attributes/Methods**

(in addition to the values in a WorkflowException)

- **task**: the specific task (not the task spec, but the actual executing task)
- **error_msg**: the detailed human-readable message. (conflicts with error above)
- **exception**: the original exception this wraps around.
- **line_number**: the line number that contains the error.
- **offset**: the point in the line that caused the error.
- **error_line**: the content of the line that caused the error.

It will accept the line_number and error_line as arguments; if the
underlying error provided is a SyntaxError, it will try to derive this
information from the error itself.
If this is a NameError, it will attempt to calculate a did-you-mean
suggestion and attach it as a note.
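
A sketch of inspecting a WorkflowTaskException raised while running a workflow;
the workflow object is assumed to be an existing BpmnWorkflow instance::

    from SpiffWorkflow.exceptions import WorkflowTaskException

    try:
        workflow.do_engine_steps()
    except WorkflowTaskException as exc:
        print(exc.task.task_spec.name)            # the task that failed
        print(exc.line_number, exc.error_line)    # where the script failed
        for step in exc.task_trace:               # path through sub-workflows
            print(step)
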

Unused / Deprecated errors
--------------------------

**StorageException**
Deprecated -- used only by the PrettyXmlSerializer, which is not under active
support.

**DeadMethodCalled**
Related to WeakMethod, which no longer appears to be used anywhere.

@ -26,13 +26,13 @@
[SVG figure diff omitted: the change updates the Inkscape view settings (zoom, canvas position, window size), adds new sodipodi guides, and adds new paths, circles, and "Draw the code" text artwork (fill #126d82). Image size: 7.0 MiB before and after.]

@ -2,7 +2,6 @@

import unittest
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'matth'
@ -86,8 +85,7 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.save_restore()
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')
self.do_next_exclusive_step('Parallel_Approvals_SP.Step1')
self.do_next_exclusive_step(
'Parallel_Approvals_SP.Supervisor_Approval')
self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval')
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')

def testSaveRestoreWaiting(self):
@ -108,93 +106,10 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.save_restore()
self.do_next_exclusive_step('Parallel_Approvals_SP.Step1')
self.save_restore()
self.do_next_exclusive_step(
'Parallel_Approvals_SP.Supervisor_Approval')
self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval')
self.save_restore()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')

def testReadonlyWaiting(self):

self.do_next_named_step('First_Approval_Wins.Manager_Approval')

readonly = self.get_read_only_workflow()
self.assertEqual(1, len(readonly.get_ready_user_tasks()))
self.assertEqual('Approvals.First_Approval_Wins_Done',
readonly.get_ready_user_tasks()[0].task_spec.name)
self.assertRaises(AssertionError, readonly.do_engine_steps)
self.assertRaises(AssertionError, readonly.refresh_waiting_tasks)
self.assertRaises(AssertionError, readonly.catch, MessageEventDefinition('Cheese'))
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)

self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')

readonly = self.get_read_only_workflow()
self.assertEqual(2, len(readonly.get_ready_user_tasks()))
self.assertEqual(
['Approvals.Manager_Approval__P_',
'Approvals.Supervisor_Approval__P_'],
sorted(t.task_spec.name for t in readonly.get_ready_user_tasks()))
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)

self.do_next_named_step('Approvals.Supervisor_Approval__P_')

readonly = self.get_read_only_workflow()
self.assertEqual(1, len(readonly.get_ready_user_tasks()))
self.assertEqual('Approvals.Manager_Approval__P_',
readonly.get_ready_user_tasks()[0].task_spec.name)
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)
self.do_next_named_step('Approvals.Manager_Approval__P_')

readonly = self.get_read_only_workflow()
self.assertEqual(1, len(readonly.get_ready_user_tasks()))
self.assertEqual('Approvals.Parallel_Approvals_Done',
readonly.get_ready_user_tasks()[0].task_spec.name)
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)
self.do_next_exclusive_step('Approvals.Parallel_Approvals_Done')

readonly = self.get_read_only_workflow()
self.assertEqual(2, len(readonly.get_ready_user_tasks()))
self.assertEqual(
['Parallel_Approvals_SP.Manager_Approval',
'Parallel_Approvals_SP.Step1'],
sorted(t.task_spec.name for t in readonly.get_ready_user_tasks()))
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')

readonly = self.get_read_only_workflow()
self.assertEqual(1, len(readonly.get_ready_user_tasks()))
self.assertEqual('Parallel_Approvals_SP.Step1',
readonly.get_ready_user_tasks()[0].task_spec.name)
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)
self.do_next_exclusive_step('Parallel_Approvals_SP.Step1')

readonly = self.get_read_only_workflow()
self.assertEqual(1, len(readonly.get_ready_user_tasks()))
self.assertEqual('Parallel_Approvals_SP.Supervisor_Approval',
readonly.get_ready_user_tasks()[0].task_spec.name)
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)
self.do_next_exclusive_step(
'Parallel_Approvals_SP.Supervisor_Approval')

readonly = self.get_read_only_workflow()
self.assertEqual(1, len(readonly.get_ready_user_tasks()))
self.assertEqual('Approvals.Parallel_SP_Done',
readonly.get_ready_user_tasks()[0].task_spec.name)
self.assertRaises(
AssertionError, readonly.get_ready_user_tasks()[0].complete)
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')

readonly = self.get_read_only_workflow()
self.assertEqual(0, len(readonly.get_ready_user_tasks()))
self.assertEqual(0, len(readonly.get_waiting_tasks()))


def suite():
return unittest.TestLoader().loadTestsFromTestCase(ApprovalsTest)
@ -1,118 +0,0 @@
import os
import unittest

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from .BpmnLoaderForTests import TestBpmnParser


class BpmnSerializerTest(unittest.TestCase):
CORRELATE = BpmnSerializer

def load_workflow_spec(self, filename, process_name):
f = os.path.join(os.path.dirname(__file__), 'data', filename)
parser = TestBpmnParser()
parser.add_bpmn_files_by_glob(f)
top_level_spec = parser.get_spec(process_name)
subprocesses = parser.get_subprocess_specs(process_name)
return top_level_spec, subprocesses

def setUp(self):
super(BpmnSerializerTest, self).setUp()
self.serializer = BpmnSerializer()
self.spec, subprocesses = self.load_workflow_spec('random_fact.bpmn', 'random_fact')
self.workflow = BpmnWorkflow(self.spec, subprocesses)

def testDeserializeWorkflowSpec(self):
self.assertIsNotNone(self.spec)

def testSerializeWorkflowSpec(self):
spec_serialized = self.serializer.serialize_workflow_spec(self.spec)
result = self.serializer.deserialize_workflow_spec(spec_serialized)
spec_serialized2 = self.serializer.serialize_workflow_spec(result)
self.assertEqual(spec_serialized, spec_serialized2)

def testSerializeWorkflow(self):
json = self.serializer.serialize_workflow(self.workflow)
print(json)

def testDeserializeWorkflow(self):
self._compare_with_deserialized_copy(self.workflow)

def testDeserializeCallActivityChildren(self):
"""Tested as a part of deserialize workflow."""
pass

def testSerializeTask(self):
json = self.serializer.serialize_workflow(self.workflow)
print(json)

def testDeserializeTask(self):
self._compare_with_deserialized_copy(self.workflow)

def testDeserializeActiveWorkflow(self):
self.workflow.do_engine_steps()
self._compare_with_deserialized_copy(self.workflow)

def testDeserializeWithData(self):
self.workflow.data["test"] = "my_test"
json = self.serializer.serialize_workflow(self.workflow)
wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec)
self.assertEqual('my_test', wf2.get_data("test"))

def testDeserializeWithDefaultScriptEngineClass(self):
json = self.serializer.serialize_workflow(self.workflow)
wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec)
self.assertIsNotNone(self.workflow.script_engine)
self.assertIsNotNone(wf2.script_engine)
self.assertEqual(self.workflow.script_engine.__class__,
wf2.script_engine.__class__)

@unittest.skip("Deserialize does not persist the script engine, Fix me.")
def testDeserializeWithCustomScriptEngine(self):
class CustomScriptEngine(PythonScriptEngine):
pass

self.workflow.script_engine = CustomScriptEngine()
json = self.serializer.serialize_workflow(self.workflow)
wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec)
self.assertEqual(self.workflow.script_engine.__class__,
wf2.script_engine.__class__)

def testDeserializeWithDataOnTask(self):
self.workflow.do_engine_steps()
user_task = self.workflow.get_ready_user_tasks()[0]
user_task.data = {"test":"my_test"}
self._compare_with_deserialized_copy(self.workflow)

def testLastTaskIsSetAndWorksThroughRestore(self):
self.workflow.do_engine_steps()
json = self.serializer.serialize_workflow(self.workflow)
wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec)
self.assertIsNotNone(self.workflow.last_task)
self.assertIsNotNone(wf2.last_task)
self._compare_workflows(self.workflow, wf2)

def _compare_with_deserialized_copy(self, wf):
json = self.serializer.serialize_workflow(wf)
wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec)
self._compare_workflows(wf, wf2)

def _compare_workflows(self, w1, w2):
self.assertIsInstance(w1, BpmnWorkflow)
self.assertIsInstance(w2, BpmnWorkflow)
self.assertEqual(w1.data, w2.data)
self.assertEqual(w1.name, w2.name)
for task in w1.get_ready_user_tasks():
w2_task = w2.get_task(task.id)
self.assertIsNotNone(w2_task)
self.assertEqual(task.data, w2_task.data)


def suite():
return unittest.TestLoader().loadTestsFromTestCase(BpmnSerializerTest)


if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
@ -1,13 +1,11 @@
import os
import unittest
import json
from uuid import uuid4

from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter

@ -48,13 +46,6 @@ class BpmnWorkflowSerializerTest(unittest.TestCase):
version = self.serializer.get_version(spec_serialized)
self.assertEqual(version, self.SERIALIZER_VERSION)

def testSerializeToOldSerializerThenNewSerializer(self):
old_serializer = BpmnSerializer()
old_json = old_serializer.serialize_workflow(self.workflow)
new_workflow = old_serializer.deserialize_workflow(old_json)
new_json = self.serializer.serialize_json(new_workflow)
new_workflow_2 = self.serializer.deserialize_json(new_json)

def testSerializeWorkflow(self):
serialized = self.serializer.serialize_json(self.workflow)
json.loads(serialized)
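The hunk above deletes the old-to-new serializer round trip. For context, a minimal migration sketch under the same assumptions the deleted test made (a legacy BpmnSerializer payload, and an already-configured new serializer passed in as `new_serializer`; both classes appear in the imports above):

from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer

def migrate_to_new_format(old_json, new_serializer):
    # Rehydrate a workflow saved by the legacy serializer...
    old_serializer = BpmnSerializer()
    workflow = old_serializer.deserialize_workflow(old_json)
    # ...and re-serialize it with the new JSON-based serializer.
    return new_serializer.serialize_json(workflow)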
@ -122,7 +122,7 @@ class BpmnWorkflowTestCase(unittest.TestCase):
before_dump = self.workflow.get_dump()
# Check that we can actually convert this to JSON
json_str = json.dumps(before_state)
after = self.serializer.workflow_from_dict(json.loads(json_str), read_only=False)
after = self.serializer.workflow_from_dict(json.loads(json_str))
# Check that serializing and deserializing results in the same workflow
after_state = self.serializer.workflow_to_dict(after)
after_dump = after.get_dump()
@ -132,11 +132,7 @@ class BpmnWorkflowTestCase(unittest.TestCase):
self.workflow = after

def restore(self, state):
self.workflow = self.serializer.workflow_from_dict(state, read_only=False)

def get_read_only_workflow(self):
state = self._get_workflow_state()
return self.serializer.workflow_from_dict(state, read_only=True)
self.workflow = self.serializer.workflow_from_dict(state)

def _get_workflow_state(self, do_steps=True):
if do_steps:
@ -3,9 +3,9 @@
import unittest

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.exceptions import WorkflowTaskException
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'kellym'
@ -60,7 +60,7 @@ class CallActivityTest(BpmnWorkflowTestCase):
def test_call_acitivity_errors_include_task_trace(self):
error_spec = self.subprocesses.get('ErroringBPMN')
error_spec, subprocesses = self.load_workflow_spec('call_activity_*.bpmn', 'ErroringBPMN')
with self.assertRaises(WorkflowTaskExecException) as context:
with self.assertRaises(WorkflowTaskException) as context:
self.workflow = BpmnWorkflow(error_spec, subprocesses)
self.workflow.do_engine_steps()
self.assertEquals(2, len(context.exception.task_trace))
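For readers tracking the exception rework in this hunk, a minimal sketch of the new convention, hedged to the APIs the test itself exercises (WorkflowTaskException and its task_trace attribute):

from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

def run_and_report(spec, subprocesses):
    # Script and call-activity failures now surface as the generic
    # WorkflowTaskException rather than the bpmn-specific
    # WorkflowTaskExecException.
    workflow = BpmnWorkflow(spec, subprocesses)
    try:
        workflow.do_engine_steps()
    except WorkflowTaskException as exc:
        # task_trace records the chain of tasks that led to the failure
        # (two entries in the test above: the caller and the erroring task).
        for entry in exc.task_trace:
            print(entry)
        raise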
@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
import unittest

from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'McDonald, danfunk'
@ -46,7 +46,7 @@ class CustomInlineScriptTest(BpmnWorkflowTestCase):
def test_overwrite_function_with_local_variable(self):
ready_task = self.workflow.get_tasks(TaskState.READY)[0]
ready_task.data = {'custom_function': "bill"}
with self.assertRaises(WorkflowTaskExecException) as e:
with self.assertRaises(WorkflowTaskException) as e:
self.workflow.do_engine_steps()
self.assertTrue('' in str(e.exception))
self.assertTrue('custom_function' in str(e.exception))
@ -31,18 +31,11 @@ class InvalidWorkflowsTest(BpmnWorkflowTestCase):
except ValidationException as ex:
self.assertTrue('No start event found' in ('%r' % ex),
'\'No start event found\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue('No-Start-Event.bpmn20.xml' in ('%r' % ex),
self.assertTrue('No-Start-Event.bpmn20.xml' in ex.file_name,
'\'No-Start-Event.bpmn20.xml\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue('process' in ('%r' % ex),
'\'process\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue(
'sid-669ddebf-4196-41ee-8b04-bcc90bc5f983' in ('%r' % ex),
'\'sid-669ddebf-4196-41ee-8b04-bcc90bc5f983\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue('No Start Event' in ('%r' % ex),
'\'No Start Event\' should be a substring of error message: \'%r\'' % ex)

def testSubprocessNotFound(self):


with self.assertRaises(ValidationException) as exc:
self.load_workflow_spec('Invalid-Workflows/Subprocess-Not-Found.bpmn20.xml', 'Subprocess Not Found')
self.assertIn("The process 'Missing subprocess' was not found.", str(exc))
@ -60,15 +53,12 @@ class InvalidWorkflowsTest(BpmnWorkflowTestCase):
'There is no support implemented for this task type' in (
'%r' % ex),
'\'There is no support implemented for this task type\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue('Unsupported-Task.bpmn20.xml' in ('%r' % ex),
self.assertTrue('Unsupported-Task.bpmn20.xml' in ex.file_name,
'\'Unsupported-Task.bpmn20.xml\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue('businessRuleTask' in ('%r' % ex),
'\'businessRuleTask\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue(
'sid-75EEAB28-3B69-4282-B91A-0F3C97931834' in ('%r' % ex),
'\'sid-75EEAB28-3B69-4282-B91A-0F3C97931834\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue('Business Rule Task' in ('%r' % ex),
'\'Business Rule Task\' should be a substring of error message: \'%r\'' % ex)
self.assertTrue('businessRuleTask' in ex.tag,
'\'businessRuleTask\' should be a substring of the tag: \'%r\'' % ex)
self.assertTrue('Business Rule Task' in ex.name,
'\'Business Rule Task\' should be the name: \'%s\'' % ex.name)


def suite():
@ -1,7 +1,8 @@
import unittest
import os

from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser, BpmnValidator
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException


class ParserTest(unittest.TestCase):
@ -27,3 +28,17 @@ class ParserTest(unittest.TestCase):
self.assertEqual(generate.data_output_associations[0].name, 'obj_1')
self.assertEqual(len(read.data_input_associations), 1)
self.assertEqual(read.data_input_associations[0].name, 'obj_1')

def testValidatorError(self):
parser = BpmnParser(validator=BpmnValidator())
bpmn_file = os.path.join(os.path.dirname(__file__), 'data',
'data_object_invalid.bpmn')
errored = False
try:
parser.add_bpmn_file(bpmn_file)
except ValidationException as ex:
errored = True
self.assertEqual(ex.file_name, bpmn_file)
self.assertEqual(14, ex.line_number)
self.assertIn('DataObjectReference_0cm8dnh', str(ex))
self.assertTrue(errored, "This should have errored out with a validation exception.")
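A compact usage sketch of the validator pathway this new test exercises; only the attributes asserted above (file_name, line_number) are assumed, and the file path is illustrative:

import os

from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser, BpmnValidator
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException

parser = BpmnParser(validator=BpmnValidator())
bpmn_file = os.path.join('data', 'data_object_invalid.bpmn')  # hypothetical path
try:
    parser.add_bpmn_file(bpmn_file)
except ValidationException as ex:
    # The validator pinpoints the offending file and line.
    print(ex.file_name, ex.line_number, str(ex))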
@ -2,7 +2,7 @@

import unittest

from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -39,7 +39,7 @@ class InlineScriptTest(BpmnWorkflowTestCase):
# StartTask doesn't know about testvar, it happened earlier.
# calling an exec that references testvar, in the context of the
# start task should fail.
with self.assertRaises(WorkflowTaskExecException):
with self.assertRaises(WorkflowTaskException):
result = self.workflow.script_engine.evaluate(startTask, 'testvar == True')


@ -8,16 +8,15 @@ sys.path.insert(0, os.path.join(dirname, '..', '..', '..'))

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

class ServiceTaskTest(BpmnWorkflowTestCase):

def setUp(self):

spec, subprocesses = self.load_workflow_spec('service_task.bpmn',
spec, subprocesses = self.load_workflow_spec('service_task.bpmn',
'service_task_example1')
self.workflow = BpmnWorkflow(spec, subprocesses)
self.workflow = BpmnWorkflow(spec, subprocesses)

def testRunThroughHappy(self):
self.workflow.do_engine_steps()
@ -0,0 +1,152 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
<bpmn:process id="Process" isExecutable="true">
<bpmn:dataObject id="obj_1" />
<bpmn:startEvent id="Event_0kmwi7u">
<bpmn:outgoing>Flow_18858hr</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_18858hr" sourceRef="Event_0kmwi7u" targetRef="generate_data" />
<bpmn:sequenceFlow id="Flow_0gbxq9s" sourceRef="generate_data" targetRef="task_1" />
<bpmn:sequenceFlow id="Flow_1r7v9yo" sourceRef="task_1" targetRef="read_data" />
<!-- All the data object references here have the same id, which is not valid according to the XML schema. -->
<bpmn:dataObjectReference id="DataObjectReference_0cm8dnh" name="Data" dataObjectRef="obj_1" />
|
||||
<bpmn:dataObjectReference id="DataObjectReference_0cm8dnh" name="Data" dataObjectRef="obj_1" />
|
||||
<bpmn:dataObjectReference id="DataObjectReference_0cm8dnh" name="Data" dataObjectRef="obj_1" />
|
||||
<bpmn:endEvent id="Event_0qw1yr0">
|
||||
<bpmn:incoming>Flow_19pyf8s</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1tnu3ej" sourceRef="read_data" targetRef="subprocess" />
|
||||
<bpmn:userTask id="read_data" name="Read Data">
|
||||
<bpmn:incoming>Flow_1r7v9yo</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1tnu3ej</bpmn:outgoing>
|
||||
<bpmn:property id="Property_1uusomz" name="__targetRef_placeholder" />
|
||||
<bpmn:dataInputAssociation id="DataInputAssociation_1vaag83">
|
||||
<bpmn:sourceRef>DataObjectReference_0pztwm3</bpmn:sourceRef>
|
||||
<bpmn:targetRef>Property_1uusomz</bpmn:targetRef>
|
||||
</bpmn:dataInputAssociation>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="generate_data" name="Generate Data">
|
||||
<bpmn:incoming>Flow_18858hr</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0gbxq9s</bpmn:outgoing>
|
||||
<bpmn:dataOutputAssociation id="DataOutputAssociation_053pozp">
|
||||
<bpmn:targetRef>DataObjectReference_17fhr1j</bpmn:targetRef>
|
||||
</bpmn:dataOutputAssociation>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="task_1" name="Task">
|
||||
<bpmn:incoming>Flow_0gbxq9s</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1r7v9yo</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:subProcess id="subprocess" name="Subprocess">
|
||||
<bpmn:incoming>Flow_1tnu3ej</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_19pyf8s</bpmn:outgoing>
|
||||
<bpmn:property id="Property_1q5wp77" name="__targetRef_placeholder" />
|
||||
<bpmn:dataInputAssociation id="DataInputAssociation_0w2qahx">
|
||||
<bpmn:sourceRef>DataObjectReference_0cm8dnh</bpmn:sourceRef>
|
||||
<bpmn:targetRef>Property_1q5wp77</bpmn:targetRef>
|
||||
</bpmn:dataInputAssociation>
|
||||
<bpmn:startEvent id="Event_1wuwx2f">
|
||||
<bpmn:outgoing>Flow_0yx8lkz</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:task id="placeholder">
|
||||
<bpmn:incoming>Flow_0yx8lkz</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0rk4i35</bpmn:outgoing>
|
||||
</bpmn:task>
|
||||
<bpmn:sequenceFlow id="Flow_0yx8lkz" sourceRef="Event_1wuwx2f" targetRef="placeholder" />
|
||||
<bpmn:endEvent id="Event_1qcnmnt">
|
||||
<bpmn:incoming>Flow_0rk4i35</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0rk4i35" sourceRef="placeholder" targetRef="Event_1qcnmnt" />
|
||||
</bpmn:subProcess>
|
||||
<bpmn:sequenceFlow id="Flow_19pyf8s" sourceRef="subprocess" targetRef="Event_0qw1yr0" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process">
|
||||
<bpmndi:BPMNEdge id="Flow_19pyf8s_di" bpmnElement="Flow_19pyf8s">
|
||||
<di:waypoint x="1110" y="180" />
|
||||
<di:waypoint x="1192" y="180" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1tnu3ej_di" bpmnElement="Flow_1tnu3ej">
|
||||
<di:waypoint x="680" y="180" />
|
||||
<di:waypoint x="740" y="180" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1r7v9yo_di" bpmnElement="Flow_1r7v9yo">
|
||||
<di:waypoint x="520" y="180" />
|
||||
<di:waypoint x="580" y="180" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0gbxq9s_di" bpmnElement="Flow_0gbxq9s">
|
||||
<di:waypoint x="360" y="180" />
|
||||
<di:waypoint x="420" y="180" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_18858hr_di" bpmnElement="Flow_18858hr">
|
||||
<di:waypoint x="208" y="180" />
|
||||
<di:waypoint x="260" y="180" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_0kmwi7u_di" bpmnElement="Event_0kmwi7u">
|
||||
<dc:Bounds x="172" y="162" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="DataObjectReference_17fhr1j_di" bpmnElement="DataObjectReference_17fhr1j">
|
||||
<dc:Bounds x="292" y="275" width="36" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="299" y="332" width="25" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="DataObjectReference_0pztwm3_di" bpmnElement="DataObjectReference_0pztwm3">
|
||||
<dc:Bounds x="612" y="275" width="36" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="619" y="332" width="25" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="DataObjectReference_0cm8dnh_di" bpmnElement="DataObjectReference_0cm8dnh">
|
||||
<dc:Bounds x="907" y="375" width="36" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="913" y="432" width="25" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0qw1yr0_di" bpmnElement="Event_0qw1yr0">
|
||||
<dc:Bounds x="1192" y="162" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_08lb08m_di" bpmnElement="read_data">
|
||||
<dc:Bounds x="580" y="140" width="100" height="80" />
|
||||
<bpmndi:BPMNLabel />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_00pwxgv_di" bpmnElement="generate_data">
|
||||
<dc:Bounds x="260" y="140" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1xmp3i1_di" bpmnElement="task_1">
|
||||
<dc:Bounds x="420" y="140" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_10c32lr_di" bpmnElement="subprocess" isExpanded="true">
|
||||
<dc:Bounds x="740" y="80" width="370" height="200" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0rk4i35_di" bpmnElement="Flow_0rk4i35">
|
||||
<di:waypoint x="970" y="180" />
|
||||
<di:waypoint x="1032" y="180" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0yx8lkz_di" bpmnElement="Flow_0yx8lkz">
|
||||
<di:waypoint x="816" y="180" />
|
||||
<di:waypoint x="870" y="180" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_1wuwx2f_di" bpmnElement="Event_1wuwx2f">
|
||||
<dc:Bounds x="780" y="162" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_01ooqcb_di" bpmnElement="placeholder">
|
||||
<dc:Bounds x="870" y="140" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1qcnmnt_di" bpmnElement="Event_1qcnmnt">
|
||||
<dc:Bounds x="1032" y="162" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="DataInputAssociation_1vaag83_di" bpmnElement="DataInputAssociation_1vaag83">
|
||||
<di:waypoint x="630" y="275" />
|
||||
<di:waypoint x="630" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="DataOutputAssociation_053pozp_di" bpmnElement="DataOutputAssociation_053pozp">
|
||||
<di:waypoint x="309" y="220" />
|
||||
<di:waypoint x="308" y="275" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="DataInputAssociation_0w2qahx_di" bpmnElement="DataInputAssociation_0w2qahx">
|
||||
<di:waypoint x="925" y="375" />
|
||||
<di:waypoint x="925" y="280" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,73 +0,0 @@
# -*- coding: utf-8 -*-

from builtins import str
import sys
import unittest
import os
dirname = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(dirname, '..', '..', '..', '..'))

import uuid
from SpiffWorkflow.bpmn.serializer.dict import BPMNDictionarySerializer
from tests.SpiffWorkflow.serializer.baseTest import SerializerTest
from SpiffWorkflow.workflow import Workflow


class BPMNDictionarySerializerTest(SerializerTest):

def setUp(self):
super(BPMNDictionarySerializerTest, self).setUp()
self.serializer = BPMNDictionarySerializer()
self.return_type = dict

def _compare_results(self, item1, item2,
exclude_dynamic=False,
exclude_items=None):
exclude_items = exclude_items if exclude_items is not None else []
if exclude_dynamic:
if 'last_state_change' not in exclude_items:
exclude_items.append('last_state_change')
if 'last_task' not in exclude_items:
exclude_items.append('last_task')
if uuid.UUID not in exclude_items:
exclude_items.append(uuid.UUID)
if type(item1) in exclude_items:
return

if isinstance(item1, dict):
self.assertIsInstance(item2, dict)
for key, value in list(item1.items()):
self.assertIn(key, item2)
if key in exclude_items:
continue
self._compare_results(value, item2[key],
exclude_dynamic=exclude_dynamic,
exclude_items=exclude_items)
for key in item2:
self.assertIn(key, item1)

elif isinstance(item1, list):
msg = "item is not a list (is a " + str(type(item2)) + ")"
self.assertIsInstance(item2, list, msg)
msg = "list lengths differ: {} vs {}".format(
len(item1), len(item2))
self.assertEqual(len(item1), len(item2), msg)
for i, listitem in enumerate(item1):
self._compare_results(listitem, item2[i],
exclude_dynamic=exclude_dynamic,
exclude_items=exclude_items)

elif isinstance(item1, Workflow):
raise Exception("Item is a Workflow")

else:
msg = "{}: types differ: {} vs {}".format(
str(item2), type(item1), type(item2))
self.assertEqual(type(item1), type(item2), msg)
self.assertEqual(item1, item2)


def suite():
return unittest.defaultTestLoader.loadTestsFromTestCase(BPMNDictionarySerializerTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-

import sys
import unittest
import os
dirname = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(dirname, '..', '..', '..', '..'))

import json
from SpiffWorkflow.bpmn.serializer.json import BPMNJSONSerializer
from tests.SpiffWorkflow.serializer.dictTest import DictionarySerializerTest


class BPMNJSONSerializerTest(DictionarySerializerTest):

def setUp(self):
super(BPMNJSONSerializerTest, self).setUp()
self.serializer = BPMNJSONSerializer()
self.return_type = str

def _prepare_result(self, item):
return json.loads(item)

def _compare_results(self, item1, item2, exclude_dynamic=False,
exclude_items=None):
if exclude_dynamic:
exclude_items = ['__uuid__']
else:
exclude_items = []
super(BPMNJSONSerializerTest, self)._compare_results(item1, item2,
exclude_dynamic=exclude_dynamic,
exclude_items=exclude_items)


def suite():
return unittest.defaultTestLoader.loadTestsFromTestCase(BPMNJSONSerializerTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
@ -1,7 +1,7 @@
import os
import unittest

from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from SpiffWorkflow.exceptions import SpiffWorkflowException, WorkflowException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

@ -15,17 +15,23 @@ class BusinessRuleTaskParserTest(BaseTestCase):
'invalid/InvalidDecision.bpmn', 'Process_1', 'invalid_decision.dmn')
self.workflow = BpmnWorkflow(self.spec)

def testExceptionPrint(self):
e1 = Exception("test 1")
print (e1)
e = SpiffWorkflowException("test")
print (e)

def testDmnRaisesTaskErrors(self):
self.workflow = BpmnWorkflow(self.spec)
self.workflow.get_tasks(TaskState.READY)[0].set_data(x=3)
try:
self.workflow.do_engine_steps()
self.assertTrue(False, "An error should have been raised.")
except WorkflowTaskExecException as we:
except WorkflowException as we:
self.assertTrue(True, "An error was raised..")
self.assertEquals("InvalidDecisionTaskId", we.sender.name)
self.maxDiff = 1000
self.assertEquals("Error evaluating expression spam= 1", str(we))
self.assertEqual("InvalidDecisionTaskId", we.task_spec.name)
self.maxDiff = 1000
self.assertEquals("Error evaluating expression 'spam= 1'. Rule failed on row 1. Business Rule Task 'Invalid Decision'.", str(we))

def suite():
return unittest.TestLoader().loadTestsFromTestCase(BusinessRuleTaskParserTest)
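A sketch of the error shape the rewritten test expects from a failing DMN decision, assuming only what the assertions above establish (a WorkflowException whose task_spec names the task and whose message names the expression, rule row, and business rule task):

from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

def run_invalid_decision(spec):
    workflow = BpmnWorkflow(spec)
    workflow.get_tasks(TaskState.READY)[0].set_data(x=3)
    try:
        workflow.do_engine_steps()
    except WorkflowException as we:
        # e.g. "Error evaluating expression 'spam= 1'. Rule failed on
        # row 1. Business Rule Task 'Invalid Decision'."
        print(we.task_spec.name, str(we))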
@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_1" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_1" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0">
<bpmn:process id="Process_1" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1b29lxw</bpmn:outgoing>
@ -46,59 +46,59 @@ of documentation</bpmn:documentation>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="122" y="106" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="164" y="322" width="90" height="20" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_0n32cxd_di" bpmnElement="EndEvent_0n32cxd">
<dc:Bounds x="1132" y="106" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="933" y="505" width="0" height="12" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_066d5e1_di" bpmnElement="Flow_066d5e1">
<di:waypoint x="850" y="124" />
<di:waypoint x="930" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0z7tfh1_di" bpmnElement="Flow_0z7tfh1">
<di:waypoint x="510" y="124" />
<di:waypoint x="575" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_09ciw49_di" bpmnElement="Flow_09ciw49">
<di:waypoint x="340" y="124" />
<di:waypoint x="410" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0fusz9y_di" bpmnElement="Flow_0fusz9y">
<di:waypoint x="1030" y="124" />
<di:waypoint x="1092" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1b29lxw_di" bpmnElement="Flow_1b29lxw">
<di:waypoint x="188" y="124" />
<di:waypoint x="240" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_06fnqj2_di" bpmnElement="SequenceFlow_06fnqj2">
<di:waypoint x="650" y="124" />
<di:waypoint x="700" y="124" />
<di:waypoint x="675" y="124" />
<di:waypoint x="750" y="124" />
<bpmndi:BPMNLabel>
<dc:Bounds x="850" y="462" width="0" height="12" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="BusinessRuleTask_1ipm12w_di" bpmnElement="Task_067fajl">
<dc:Bounds x="550" y="84" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1b29lxw_di" bpmnElement="Flow_1b29lxw">
<di:waypoint x="158" y="124" />
<di:waypoint x="210" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0fusz9y_di" bpmnElement="Flow_0fusz9y">
<di:waypoint x="980" y="124" />
<di:waypoint x="1132" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1uk7uyi_di" bpmnElement="Activity_0w0chd2">
<dc:Bounds x="880" y="84" width="100" height="80" />
<dc:Bounds x="930" y="84" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_09ciw49_di" bpmnElement="Flow_09ciw49">
<di:waypoint x="310" y="124" />
<di:waypoint x="360" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1v5khzq_di" bpmnElement="Activity_0qh0jpg">
<dc:Bounds x="210" y="84" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0z7tfh1_di" bpmnElement="Flow_0z7tfh1">
<di:waypoint x="460" y="124" />
<di:waypoint x="550" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0vne2ba_di" bpmnElement="Activity_1ftn207">
<dc:Bounds x="360" y="84" width="100" height="80" />
<dc:Bounds x="410" y="84" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_066d5e1_di" bpmnElement="Flow_066d5e1">
<di:waypoint x="800" y="124" />
<di:waypoint x="880" y="124" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0z6lv9u_di" bpmnElement="Activity_1mu3z8p">
<dc:Bounds x="700" y="84" width="100" height="80" />
<dc:Bounds x="750" y="84" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="106" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="164" y="322" width="90" height="20" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1v5khzq_di" bpmnElement="Activity_0qh0jpg">
<dc:Bounds x="240" y="84" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_0n32cxd_di" bpmnElement="EndEvent_0n32cxd">
<dc:Bounds x="1092" y="106" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="933" y="505" width="0" height="12" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="BusinessRuleTask_1ipm12w_di" bpmnElement="Task_067fajl">
<dc:Bounds x="575" y="84" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:camunda="http://camunda.org/schema/1.0/dmn" id="definitions_1jblnbx" name="definitions" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<definitions xmlns="https://www.omg.org/spec/DMN/20191111/MODEL/" xmlns:camunda="http://camunda.org/schema/1.0/dmn" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/2.0" id="definitions_1jblnbx" name="definitions" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="5.0.0">
<decision id="IntegerDecisionStringOutputTable" name="IntegerDecisionStringOutput">
<decisionTable id="decisionTable">
<input id="InputClause_1tm0ceq" label="x" camunda:inputVariable="">
<input id="InputClause_1tm0ceq" label="x" biodi:width="192" camunda:inputVariable="">
<inputExpression id="LiteralExpression_04o7chw" typeRef="integer">
<text>item.x</text>
</inputExpression>
@ -1,9 +1,7 @@
import json
import unittest

from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, \
EnumFormField
from SpiffWorkflow.specs.base import TaskSpec
from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, EnumFormField
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec


@ -13,7 +11,6 @@ class UserTaskSpecTest(unittest.TestCase):
def create_instance(self):
if 'testtask' in self.wf_spec.task_specs:
del self.wf_spec.task_specs['testtask']
task_spec = TaskSpec(self.wf_spec, 'testtask', description='foo')
self.form = Form()
return UserTask(self.wf_spec, 'userTask', self.form)

@ -33,43 +30,6 @@ class UserTaskSpecTest(unittest.TestCase):
self.assertEqual(self.form, self.user_spec.form)

def testSerialize(self):
pass

def test_text_field(self):
form_field = FormField(form_type="text")
form_field.id = "1234"
self.form.add_field(form_field)
self.assertEqual(form_field, self.user_spec.form.fields[0])

def test_enum_field(self):
enum_field = EnumFormField()
enum_field.label = "Which kind of fool are you"
enum_field.add_option('old fool', 'This is old, therefor it is good.')
enum_field.add_option('new fool',
'This is new, therefor it is better.')
self.form.add_field(enum_field)
self.assertEqual(enum_field, self.user_spec.form.fields[-1])

def test_properties(self):
form_field = FormField(form_type="text")
self.assertFalse(form_field.has_property("wilma"))
form_field.add_property("wilma", "flintstone")
self.assertTrue(form_field.has_property("wilma"))
self.assertEquals("flintstone", form_field.get_property("wilma"))

def test_validations(self):
form_field = FormField(form_type="text")
self.assertFalse(form_field.has_validation("barney"))
form_field.add_validation("barney", "rubble")
self.assertTrue(form_field.has_validation("barney"))
self.assertEquals("rubble", form_field.get_validation("barney"))

def testIsEngineTask(self):
self.assertFalse(self.user_spec.is_engine_task())

def test_convert_to_dict(self):
form = Form()

field1 = FormField(form_type="text")
field1.id = "quest"
field1.label = "What is your quest?"
@ -89,21 +49,14 @@ class UserTaskSpecTest(unittest.TestCase):
field2.add_property("description", "You know what to do.")
field2.add_validation("maxlength", "25")

form.key = "formKey"
form.add_field(field1)
form.add_field(field2)
self.form.key = "formKey"
self.form.add_field(field1)
self.form.add_field(field2)

def JsonableHandler(Obj):
if hasattr(Obj, 'jsonable'):
return Obj.jsonable()
else:
raise 'Object of type %s with value of %s is not JSON serializable' % (
type(Obj), repr(Obj))

json_form = json.dumps(form, default=JsonableHandler)
actual = json.loads(json_form)

expected = {
converter = UserTaskConverter()
dct = converter.to_dict(self.user_spec)
self.assertEqual(dct['name'], 'userTask')
self.assertEqual(dct['form'], {
"fields": [
{
"default_value": "I seek the grail!",
@ -137,12 +90,39 @@ class UserTaskSpecTest(unittest.TestCase):
}
],
"key": "formKey",
}
})

expected_parsed = json.loads(json.dumps(expected))
def test_text_field(self):
form_field = FormField(form_type="text")
form_field.id = "1234"
self.form.add_field(form_field)
self.assertEqual(form_field, self.user_spec.form.fields[0])

self.maxDiff = None
self.assertDictEqual(actual, expected_parsed)
def test_enum_field(self):
enum_field = EnumFormField()
enum_field.label = "Which kind of fool are you"
enum_field.add_option('old fool', 'This is old, therefor it is good.')
enum_field.add_option('new fool',
'This is new, therefor it is better.')
self.form.add_field(enum_field)
self.assertEqual(enum_field, self.user_spec.form.fields[-1])

def test_properties(self):
form_field = FormField(form_type="text")
self.assertFalse(form_field.has_property("wilma"))
form_field.add_property("wilma", "flintstone")
self.assertTrue(form_field.has_property("wilma"))
self.assertEquals("flintstone", form_field.get_property("wilma"))

def test_validations(self):
form_field = FormField(form_type="text")
self.assertFalse(form_field.has_validation("barney"))
form_field.add_validation("barney", "rubble")
self.assertTrue(form_field.has_validation("barney"))
self.assertEquals("rubble", form_field.get_validation("barney"))

def testIsEngineTask(self):
self.assertFalse(self.user_spec.is_engine_task())


def suite():
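The rewritten test_convert_to_dict above drops the hand-rolled JSON comparison in favor of UserTaskConverter. A minimal sketch of that pattern, assuming a camunda UserTask spec built as in create_instance (the `user_spec` name here is illustrative):

from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter

converter = UserTaskConverter()
dct = converter.to_dict(user_spec)  # user_spec: a camunda UserTask instance
# The converted dict carries the spec name plus the full form payload.
print(dct['name'], dct['form']['key'])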
@ -1,3 +1,4 @@
from SpiffWorkflow.exceptions import SpiffWorkflowException
from SpiffWorkflow.task import TaskState
from .BaseTestCase import BaseTestCase
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
@ -18,7 +19,7 @@ class PrescriptPostsciptTest(BaseTestCase):
self.call_activity_test(True)

def testDataObject(self):


spec, subprocesses = self.load_workflow_spec('prescript_postscript_data_object.bpmn', 'Process_1')
self.workflow = BpmnWorkflow(spec, subprocesses)
# Set a on the workflow and b in the first task.
@ -45,8 +46,21 @@ class PrescriptPostsciptTest(BaseTestCase):
ready_tasks[0].complete()
self.assertDictEqual({'a': 1, 'b': 2, 'c': 12, 'z': 6}, ready_tasks[0].data)

def test_for_error(self, save_restore=False):

spec, subprocesses = self.load_workflow_spec('prescript_postscript.bpmn', 'Process_1')
self.workflow = BpmnWorkflow(spec, subprocesses)
if save_restore:
self.save_restore()
ready_tasks = self.workflow.get_tasks(TaskState.READY)
# Calling do-engine steps without setting variables will raise an exception.
with self.assertRaises(SpiffWorkflowException) as se:
self.workflow.do_engine_steps()
ex = se.exception
self.assertIn("Error occurred in the Pre-Script", str(ex))

def call_activity_test(self, save_restore=False):


spec, subprocesses = self.load_workflow_spec('prescript_postscript_*.bpmn', 'parent')
self.workflow = BpmnWorkflow(spec, subprocesses)
if save_restore:
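The new test_for_error pins down how pre-script failures surface. A short sketch under the same assumptions (a SpiffWorkflowException whose message carries the "Pre-Script" marker, as asserted above):

from SpiffWorkflow.exceptions import SpiffWorkflowException
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

def expect_prescript_error(spec, subprocesses):
    workflow = BpmnWorkflow(spec, subprocesses)
    try:
        workflow.do_engine_steps()
    except SpiffWorkflowException as ex:
        # Pre-script failures are identified in the message text.
        assert "Error occurred in the Pre-Script" in str(ex)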
@ -9,7 +9,6 @@ sys.path.insert(0, os.path.join(dirname, '..', '..', '..'))

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from .BaseTestCase import BaseTestCase

class ServiceTaskDelegate:

@ -9,7 +9,6 @@ sys.path.insert(0, os.path.join(dirname, '..', '..', '..'))

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from .BaseTestCase import BaseTestCase

class ServiceTaskDelegate: