From 35ef5cbe54a18fc177ab2593001d76ab1412c382 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 19 Jan 2023 10:47:07 -0500 Subject: [PATCH] Squashed 'SpiffWorkflow/' changes from 4195453a4..1f51db962 1f51db962 Merge pull request #283 from sartography/feature/better_errors 69fb4967e Patching up some bugs and logical disconnects as I test out the errors. cf5be0096 * Making a few more things consistent in the error messages -- so there isn't filename for validation errors, and file_name for WorkflowExceptions. Same for line_number vs sourceline. * Assure that an error_type is consistently set on exceptions. * ValidationExceptions should not build up a detailed error message that replicates information available within them. 440ee16c8 Responding to some excellent suggestions from Elizabeth: 655e415e1 Merge pull request #282 from subhakarks/fix-workfowspec-dump 1f6d3cf4e Explain that the error happened in a pre-script or post-script. 8119abd14 Added a top-level SpiffWorkflowException that all exceptions inherit from. Aside from a message string, you can append information to these exceptions with "add_note", which is a new method that all exceptions have starting in Python 3.11. Switched arguments to the WorkflowException and WorkflowTaskException, which now always take a string message as the first argument and named arguments thereafter, to be consistent with all other error messages in Python. Consistently raise ValidationExceptions whenever we encounter an error anywhere during parsing of XML. The BPMN/WorkflowTaskExecException is removed in favor of just calling a WorkflowTaskException; there is nothing BPMN-specific in the logic, so no need for it. Consolidated error message logic so that things like "Did you mean" just get added by default if possible, so we don't have to separately deal with that logic each time. 
Better Error messages for DMN (include row number as a part of the error information) 13463b5c5 fix for workflowspec dump be26100bc Merge pull request #280 from sartography/feature/remove-unused-bpmn-attributes-and-methods 23a5c1d70 remove 'entering_* methods 4e5875ec8 remove sequence flow 5eed83ab1 Merge pull request #278 from sartography/feature/remove-old-serializer 614f1c68a remove compact serializer and references e7e410d4a remove old serializer and references git-subtree-dir: SpiffWorkflow git-subtree-split: 1f51db962ccaed5810f5d0f7d76a932f056430ab --- SpiffWorkflow/bpmn/FeelLikeScriptEngine.py | 4 +- SpiffWorkflow/bpmn/PythonScriptEngine.py | 45 +- SpiffWorkflow/bpmn/exceptions.py | 48 +- SpiffWorkflow/bpmn/parser/BpmnParser.py | 18 +- SpiffWorkflow/bpmn/parser/ProcessParser.py | 4 +- SpiffWorkflow/bpmn/parser/TaskParser.py | 38 +- .../bpmn/parser/ValidationException.py | 29 +- SpiffWorkflow/bpmn/parser/event_parsers.py | 16 +- SpiffWorkflow/bpmn/parser/node_parser.py | 4 +- SpiffWorkflow/bpmn/parser/task_parsers.py | 26 +- .../bpmn/serializer/BpmnSerializer.py | 178 ------ .../serializer/CompactWorkflowSerializer.py | 483 --------------- SpiffWorkflow/bpmn/serializer/Packager.py | 548 ------------------ .../bpmn/serializer/bpmn_converters.py | 22 +- SpiffWorkflow/bpmn/serializer/dict.py | 259 --------- SpiffWorkflow/bpmn/serializer/json.py | 35 -- SpiffWorkflow/bpmn/serializer/workflow.py | 11 +- .../serializer/workflow_spec_converter.py | 6 - SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py | 7 - SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py | 138 +---- SpiffWorkflow/bpmn/specs/ExclusiveGateway.py | 12 +- SpiffWorkflow/bpmn/specs/InclusiveGateway.py | 4 - SpiffWorkflow/bpmn/specs/ManualTask.py | 4 - SpiffWorkflow/bpmn/specs/MultiInstanceTask.py | 23 +- SpiffWorkflow/bpmn/specs/NoneTask.py | 4 - SpiffWorkflow/bpmn/specs/ParallelGateway.py | 4 - SpiffWorkflow/bpmn/specs/ScriptTask.py | 7 - SpiffWorkflow/bpmn/specs/SubWorkflowTask.py | 13 - SpiffWorkflow/bpmn/specs/UnstructuredJoin.py | 5 +- SpiffWorkflow/bpmn/specs/events/EndEvent.py | 7 - .../bpmn/specs/events/IntermediateEvent.py | 13 - SpiffWorkflow/bpmn/specs/events/StartEvent.py | 8 - .../bpmn/specs/events/event_definitions.py | 40 -- .../bpmn/specs/events/event_types.py | 14 - SpiffWorkflow/bpmn/workflow.py | 41 +- SpiffWorkflow/camunda/parser/task_spec.py | 4 +- SpiffWorkflow/camunda/specs/UserTask.py | 29 +- .../camunda/specs/events/event_definitions.py | 6 - SpiffWorkflow/dmn/engine/DMNEngine.py | 23 +- SpiffWorkflow/dmn/parser/BpmnDmnParser.py | 2 +- SpiffWorkflow/dmn/parser/DMNParser.py | 34 +- .../dmn/serializer/task_spec_converters.py | 2 + SpiffWorkflow/dmn/specs/BusinessRuleTask.py | 18 +- SpiffWorkflow/dmn/specs/model.py | 93 +-- SpiffWorkflow/exceptions.py | 81 ++- SpiffWorkflow/operators.py | 2 +- SpiffWorkflow/serializer/dict.py | 34 +- SpiffWorkflow/serializer/json.py | 8 - SpiffWorkflow/signavio/parser/bpmn.py | 4 +- SpiffWorkflow/signavio/parser/tasks.py | 4 +- SpiffWorkflow/specs/Cancel.py | 2 +- SpiffWorkflow/specs/ExclusiveChoice.py | 5 +- SpiffWorkflow/specs/Join.py | 2 +- SpiffWorkflow/specs/LoopResetTask.py | 3 +- SpiffWorkflow/specs/MultiChoice.py | 6 +- SpiffWorkflow/specs/StartTask.py | 6 +- SpiffWorkflow/specs/SubWorkflow.py | 3 +- SpiffWorkflow/specs/ThreadMerge.py | 2 +- SpiffWorkflow/specs/WorkflowSpec.py | 38 +- SpiffWorkflow/spiff/specs/spiff_task.py | 15 +- SpiffWorkflow/task.py | 15 +- SpiffWorkflow/workflow.py | 8 +- doc/errors.rst | 90 +++ graphics/spiffworkflow_logo_ideas.svg | 131 ++++- 
tests/SpiffWorkflow/bpmn/ApprovalsTest.py | 89 +-- .../SpiffWorkflow/bpmn/BpmnSerializerTest.py | 118 ---- .../bpmn/BpmnWorkflowSerializerTest.py | 9 - .../bpmn/BpmnWorkflowTestCase.py | 8 +- .../bpmn/CallActivityEndEventTest.py | 4 +- tests/SpiffWorkflow/bpmn/CustomScriptTest.py | 4 +- .../bpmn/InvalidWorkflowsTest.py | 24 +- tests/SpiffWorkflow/bpmn/ParserTest.py | 17 +- tests/SpiffWorkflow/bpmn/ScriptTest.py | 4 +- tests/SpiffWorkflow/bpmn/ServiceTaskTest.py | 5 +- .../bpmn/data/data_object_invalid.bpmn | 152 +++++ .../SpiffWorkflow/bpmn/serializer/dictTest.py | 73 --- .../SpiffWorkflow/bpmn/serializer/jsonTest.py | 38 -- .../InvalidBusinessRuleTaskParserTest.py | 16 +- .../camunda/data/DMNMultiInstance.bpmn | 88 +-- .../data/dmn/test_integer_decision_multi.dmn | 4 +- .../camunda/specs/UserTaskSpecTest.py | 100 ++-- .../spiff/PrescriptPostscriptTest.py | 18 +- tests/SpiffWorkflow/spiff/ServiceTaskTest.py | 1 - .../spiff/ServiceTaskVariableTest.py | 1 - 84 files changed, 796 insertions(+), 2765 deletions(-) delete mode 100644 SpiffWorkflow/bpmn/serializer/BpmnSerializer.py delete mode 100644 SpiffWorkflow/bpmn/serializer/CompactWorkflowSerializer.py delete mode 100644 SpiffWorkflow/bpmn/serializer/Packager.py delete mode 100644 SpiffWorkflow/bpmn/serializer/dict.py delete mode 100644 SpiffWorkflow/bpmn/serializer/json.py create mode 100644 doc/errors.rst delete mode 100644 tests/SpiffWorkflow/bpmn/BpmnSerializerTest.py create mode 100644 tests/SpiffWorkflow/bpmn/data/data_object_invalid.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/serializer/dictTest.py delete mode 100644 tests/SpiffWorkflow/bpmn/serializer/jsonTest.py diff --git a/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py b/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py index 4cdd48679..fe10c5501 100644 --- a/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py +++ b/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py @@ -168,7 +168,7 @@ def feelParseISODuration(input): """ if input[0] != 'P': - raise Exception("Oh Crap!") + raise Exception("ISO Duration format must begin with the letter P") input = input[1:] days, time = input.split("T") lookups = [("Y",days,timedelta(days=365)), @@ -239,7 +239,7 @@ fixes = [(r'string\s+length\((.+?)\)','len(\\1)'), ('true','True'), ('false','False') ] - + externalFuncs = { 'feelConvertTime':feelConvertTime, 'FeelInterval':FeelInterval, diff --git a/SpiffWorkflow/bpmn/PythonScriptEngine.py b/SpiffWorkflow/bpmn/PythonScriptEngine.py index 5d45ceb01..38279a1da 100644 --- a/SpiffWorkflow/bpmn/PythonScriptEngine.py +++ b/SpiffWorkflow/bpmn/PythonScriptEngine.py @@ -4,7 +4,7 @@ import copy import sys import traceback -from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException +from ..exceptions import SpiffWorkflowException, WorkflowTaskException from ..operators import Operator @@ -118,10 +118,11 @@ class PythonScriptEngine(object): return expression._matches(task) else: return self._evaluate(expression, task.data, external_methods) + except SpiffWorkflowException as se: + se.add_note(f"Error evaluating expression '{expression}'") + raise se except Exception as e: - raise WorkflowTaskExecException(task, - f"Error evaluating expression {expression}", - e) + raise WorkflowTaskException(f"Error evaluating expression '{expression}'", task=task, exception=e) def execute(self, task, script, external_methods=None): """ @@ -141,25 +142,33 @@ class PythonScriptEngine(object): raise NotImplementedError("To call external services override the script engine and implement `call_service`.") def create_task_exec_exception(self, 
task, script, err): - - if isinstance(err, WorkflowTaskExecException): + line_number, error_line = self.get_error_line_number_and_content(script, err) + if isinstance(err, SpiffWorkflowException): + err.line_number = line_number + err.error_line = error_line + err.add_note(f"Python script error on line {line_number}: '{error_line}'") return err - detail = err.__class__.__name__ if len(err.args) > 0: detail += ":" + err.args[0] + return WorkflowTaskException(detail, task=task, exception=err, line_number=line_number, error_line=error_line) + + def get_error_line_number_and_content(self, script, err): line_number = 0 error_line = '' - cl, exc, tb = sys.exc_info() - # Loop back through the stack trace to find the file called - # '<string>' - which is the script we are executing, then use that - # to parse and pull out the offending line. - for frame_summary in traceback.extract_tb(tb): - if frame_summary.filename == '<string>': - line_number = frame_summary.lineno - error_line = script.splitlines()[line_number - 1] - return WorkflowTaskExecException(task, detail, err, line_number, - error_line) + if isinstance(err, SyntaxError): + line_number = err.lineno + else: + cl, exc, tb = sys.exc_info() + # Loop back through the stack trace to find the file called + # '<string>' - which is the script we are executing, then use that + # to parse and pull out the offending line. + for frame_summary in traceback.extract_tb(tb): + if frame_summary.filename == '<string>': + line_number = frame_summary.lineno + if line_number > 0: + error_line = script.splitlines()[line_number - 1] + return line_number, error_line def check_for_overwrite(self, task, external_methods): """It's possible that someone will define a variable with the @@ -172,7 +181,7 @@ class PythonScriptEngine(object): msg = f"You have task data that overwrites a predefined " \ f"function(s). Please change the following variable or " \ f"field name(s) to something else: {func_overwrites}" - raise WorkflowTaskExecException(task, msg) + raise WorkflowTaskException(msg, task=task) def convert_to_box(self, data): if isinstance(data, dict): diff --git a/SpiffWorkflow/bpmn/exceptions.py b/SpiffWorkflow/bpmn/exceptions.py index 9d3163939..9bc44d818 100644 --- a/SpiffWorkflow/bpmn/exceptions.py +++ b/SpiffWorkflow/bpmn/exceptions.py @@ -1,50 +1,4 @@ -import re - -from SpiffWorkflow.exceptions import WorkflowException, WorkflowTaskException -from SpiffWorkflow.util import levenshtein - -class WorkflowTaskExecException(WorkflowTaskException): - """ - Exception during execution of task "payload". For example: - - * ScriptTask during execution of embedded script, - * ServiceTask during external service call. - """ - - def __init__(self, task, error_msg, exception=None, line_number=0, error_line=""): - """ - Exception initialization. - - :param task: the task that threw the exception - :type task: Task - :param exception: a human readable error message - :type exception: Exception - - """ - - self.offset = 0 - self.line_number = line_number - self.error_line = error_line - - if isinstance(exception, SyntaxError): - # Prefer line number from syntax error if available. 
- self.line_number = exception.lineno - self.offset = exception.offset - elif isinstance(exception, NameError): - def_match = re.match("name '(.+)' is not defined", str(exception)) - if def_match: - bad_variable = re.match("name '(.+)' is not defined", str(exception)).group(1) - most_similar = levenshtein.most_similar(bad_variable, task.data.keys(), 3) - error_msg = f'something you are referencing does not exist: ' \ - f'"{exception}".' - if len(most_similar) == 1: - error_msg += f' Did you mean \'{most_similar[0]}\'?' - if len(most_similar) > 1: - error_msg += f' Did you mean one of \'{most_similar}\'?' - - else: - error_msg = str(exception) - super().__init__(task, error_msg, exception) +from SpiffWorkflow.exceptions import WorkflowException class WorkflowDataException(WorkflowException): diff --git a/SpiffWorkflow/bpmn/parser/BpmnParser.py b/SpiffWorkflow/bpmn/parser/BpmnParser.py index 581a00aea..9e2db083f 100644 --- a/SpiffWorkflow/bpmn/parser/BpmnParser.py +++ b/SpiffWorkflow/bpmn/parser/BpmnParser.py @@ -21,7 +21,7 @@ import glob import os from lxml import etree -from lxml.etree import DocumentInvalid +from lxml.etree import DocumentInvalid, LxmlError from SpiffWorkflow.bpmn.specs.events.event_definitions import NoneEventDefinition @@ -72,8 +72,13 @@ class BpmnValidator: def validate(self, bpmn, filename=None): try: self.validator.assertValid(bpmn) - except DocumentInvalid as di: - raise DocumentInvalid(str(di) + "file: " + filename) + except ValidationException as ve: + ve.file_name = filename + ve.line_number = self.validator.error_log.last_error.line + except LxmlError as le: + last_error = self.validator.error_log.last_error + raise ValidationException(last_error.message, file_name=filename, + line_number=last_error.line) class BpmnParser(object): """ @@ -211,8 +216,7 @@ class BpmnParser(object): correlation_identifier = correlation.attrib.get("id") if correlation_identifier is None: raise ValidationException( - "Correlation identifier is missing from bpmn xml" - ) + "Correlation identifier is missing from bpmn xml" ) correlation_property_retrieval_expressions = correlation.xpath( "//bpmn:correlationPropertyRetrievalExpression", namespaces = self.namespaces) if not correlation_property_retrieval_expressions: @@ -243,9 +247,9 @@ class BpmnParser(object): def create_parser(self, node, filename=None, lane=None): parser = self.PROCESS_PARSER_CLASS(self, node, self.namespaces, filename=filename, lane=lane) if parser.get_id() in self.process_parsers: - raise ValidationException('Duplicate process ID', node=node, filename=filename) + raise ValidationException('Duplicate process ID', node=node, file_name=filename) if parser.get_name() in self.process_parsers_by_name: - raise ValidationException('Duplicate process name', node=node, filename=filename) + raise ValidationException('Duplicate process name', node=node, file_name=filename) self.process_parsers[parser.get_id()] = parser self.process_parsers_by_name[parser.get_name()] = parser diff --git a/SpiffWorkflow/bpmn/parser/ProcessParser.py b/SpiffWorkflow/bpmn/parser/ProcessParser.py index c6f6b665b..caa05168d 100644 --- a/SpiffWorkflow/bpmn/parser/ProcessParser.py +++ b/SpiffWorkflow/bpmn/parser/ProcessParser.py @@ -93,7 +93,7 @@ class ProcessParser(NodeParser): (node_parser, spec_class) = self.parser._get_parser_class(node.tag) if not node_parser or not spec_class: raise ValidationException("There is no support implemented for this task type.", - node=node, filename=self.filename) + node=node, file_name=self.filename) np = 
node_parser(self, spec_class, node, lane=self.lane) task_spec = np.parse_node() return task_spec @@ -103,7 +103,7 @@ class ProcessParser(NodeParser): # bpmn:startEvent if we have a subworkflow task start_node_list = self.xpath('./bpmn:startEvent') if not start_node_list and self.process_executable: - raise ValidationException("No start event found", node=self.node, filename=self.filename) + raise ValidationException("No start event found", node=self.node, file_name=self.filename) self.spec = BpmnProcessSpec(name=self.get_id(), description=self.get_name(), filename=self.filename) # Check for an IO Specification. diff --git a/SpiffWorkflow/bpmn/parser/TaskParser.py b/SpiffWorkflow/bpmn/parser/TaskParser.py index 552fe7117..5c17f7347 100644 --- a/SpiffWorkflow/bpmn/parser/TaskParser.py +++ b/SpiffWorkflow/bpmn/parser/TaskParser.py @@ -72,7 +72,7 @@ class TaskParser(NodeParser): raise ValidationException( f'Unsupported MultiInstance Task: {self.task.__class__}', node=self.node, - filename=self.filename) + file_name=self.filename) self.task.loopTask = loop_task self.task.isSequential = is_sequential @@ -127,17 +127,15 @@ class TaskParser(NodeParser): self.spec, '%s.BoundaryEventParent' % self.get_id(), self.task, lane=self.task.lane) self.process_parser.parsed_nodes[self.node.get('id')] = parent - parent.connect_outgoing(self.task, '%s.FromBoundaryEventParent' % self.get_id(), None, None) + parent.connect(self.task) for event in children: child = self.process_parser.parse_node(event) if isinstance(child.event_definition, CancelEventDefinition) \ and not isinstance(self.task, TransactionSubprocess): raise ValidationException('Cancel Events may only be used with transactions', - node=self.node, - filename=self.filename) - parent.connect_outgoing(child, - '%s.FromBoundaryEventParent' % event.get('id'), - None, None) + node=self.node, + file_name=self.filename) + parent.connect(child) return parent def parse_node(self): @@ -169,7 +167,7 @@ class TaskParser(NodeParser): 'Multiple outgoing flows are not supported for ' 'tasks of type', node=self.node, - filename=self.filename) + file_name=self.filename) for sequence_flow in outgoing: target_ref = sequence_flow.get('targetRef') try: @@ -179,7 +177,7 @@ class TaskParser(NodeParser): 'When looking for a task spec, we found two items, ' 'perhaps a form has the same ID? 
(%s)' % target_ref, node=self.node, - filename=self.filename) + file_name=self.filename) c = self.process_parser.parse_node(target_node) position = c.position @@ -196,18 +194,13 @@ class TaskParser(NodeParser): default_outgoing = sequence_flow.get('id') for (position, c, target_node, sequence_flow) in children: - self.connect_outgoing( - c, target_node, sequence_flow, - sequence_flow.get('id') == default_outgoing) + self.connect_outgoing(c, sequence_flow, sequence_flow.get('id') == default_outgoing) return parent if boundary_event_nodes else self.task - except ValidationException: - raise + except ValidationException as ve: + raise ve except Exception as ex: - exc_info = sys.exc_info() - tb = "".join(traceback.format_exception( - exc_info[0], exc_info[1], exc_info[2])) - raise ValidationException("%r" % (ex), node=self.node, filename=self.filename) + raise ValidationException("%r" % (ex), node=self.node, file_name=self.filename) def get_task_spec_name(self, target_ref=None): """ @@ -225,18 +218,13 @@ class TaskParser(NodeParser): description=self.node.get('name', None), position=self.position) - def connect_outgoing(self, outgoing_task, outgoing_task_node, - sequence_flow_node, is_default): + def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default): """ Connects this task to the indicating outgoing task, with the details in the sequence flow. A subclass can override this method to get extra information from the node. """ - self.task.connect_outgoing( - outgoing_task, sequence_flow_node.get('id'), - sequence_flow_node.get( - 'name', None), - self.parse_documentation(sequence_flow_node)) + self.task.connect(outgoing_task) def handles_multiple_outgoing(self): """ diff --git a/SpiffWorkflow/bpmn/parser/ValidationException.py b/SpiffWorkflow/bpmn/parser/ValidationException.py index cdbab4e90..78c98b328 100644 --- a/SpiffWorkflow/bpmn/parser/ValidationException.py +++ b/SpiffWorkflow/bpmn/parser/ValidationException.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2012 Matthew Hampton +# Copyright (C) 2012 Matthew Hampton, 2023 Dan Funk # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public @@ -17,10 +17,10 @@ # 02110-1301 USA from .util import BPMN_MODEL_NS +from ...exceptions import SpiffWorkflowException -class ValidationException(Exception): - +class ValidationException(SpiffWorkflowException): """ A ValidationException should be thrown with enough information for the user to diagnose the problem and sort it out. @@ -28,23 +28,20 @@ class ValidationException(Exception): If available, please provide the offending XML node and filename. 
""" - def __init__(self, msg, node=None, filename=None, *args, **kwargs): + def __init__(self, msg, node=None, file_name=None, *args, **kwargs): if node is not None: self.tag = self._shorten_tag(node.tag) - self.id = node.get('id', '') - self.name = node.get('name', '') - self.sourceline = getattr(node, 'sourceline', '') + self.id = node.get('id', '') + self.name = node.get('name', '') + self.line_number = getattr(node, 'line_number', '') else: - self.tag = '' - self.id = '' - self.name = '' - self.sourceline = '' - self.filename = filename or '' - message = ('%s\nSource Details: ' - '%s (id:%s), name \'%s\', line %s in %s') % ( - msg, self.tag, self.id, self.name, self.sourceline, self.filename) + self.tag = kwargs.get('tag', '') + self.id = kwargs.get('id', '') + self.name = kwargs.get('name', '') + self.line_number = kwargs.get('line_number', '') + self.file_name = file_name or '' - super(ValidationException, self).__init__(message, *args, **kwargs) + super(ValidationException, self).__init__(msg, *args) @classmethod def _shorten_tag(cls, tag): diff --git a/SpiffWorkflow/bpmn/parser/event_parsers.py b/SpiffWorkflow/bpmn/parser/event_parsers.py index 177d755a4..d4099b925 100644 --- a/SpiffWorkflow/bpmn/parser/event_parsers.py +++ b/SpiffWorkflow/bpmn/parser/event_parsers.py @@ -93,9 +93,9 @@ class EventDefinitionParser(TaskParser): time_cycle = first(self.xpath('.//bpmn:timeCycle')) if time_cycle is not None: return CycleTimerEventDefinition(label, time_cycle.text) - raise ValidationException("Unknown Time Specification", node=self.node, filename=self.filename) + raise ValidationException("Unknown Time Specification", node=self.node, file_name=self.filename) except Exception as e: - raise ValidationException("Time Specification Error. " + str(e), node=self.node, filename=self.filename) + raise ValidationException("Time Specification Error. 
" + str(e), node=self.node, file_name=self.filename) def get_message_correlations(self, message_ref): @@ -186,7 +186,7 @@ class EndEventParser(EventDefinitionParser): event_definition = self.get_event_definition([MESSAGE_EVENT_XPATH, CANCEL_EVENT_XPATH, ERROR_EVENT_XPATH, ESCALATION_EVENT_XPATH, TERMINATION_EVENT_XPATH]) task = self._create_task(event_definition) - task.connect_outgoing(self.spec.end, '%s.ToEndJoin' % self.node.get('id'), None, None) + task.connect(self.spec.end) return task @@ -251,12 +251,6 @@ class EventBasedGatewayParser(EventDefinitionParser): def handles_multiple_outgoing(self): return True - def connect_outgoing(self, outgoing_task, outgoing_task_node, sequence_flow_node, is_default): + def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default): self.task.event_definition.event_definitions.append(outgoing_task.event_definition) - self.task.connect_outgoing( - outgoing_task, - sequence_flow_node.get('id'), - sequence_flow_node.get('name', None), - self.parse_documentation(sequence_flow_node) - ) - \ No newline at end of file + self.task.connect(outgoing_task) diff --git a/SpiffWorkflow/bpmn/parser/node_parser.py b/SpiffWorkflow/bpmn/parser/node_parser.py index 0b1c8ed46..055520355 100644 --- a/SpiffWorkflow/bpmn/parser/node_parser.py +++ b/SpiffWorkflow/bpmn/parser/node_parser.py @@ -46,7 +46,7 @@ class NodeParser: if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects: specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')]) else: - raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.filename) + raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.file_name) return specs def parse_outgoing_data_references(self): @@ -56,7 +56,7 @@ class NodeParser: if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects: specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')]) else: - raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.filename) + raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.file_name) return specs def parse_extensions(self, node=None): diff --git a/SpiffWorkflow/bpmn/parser/task_parsers.py b/SpiffWorkflow/bpmn/parser/task_parsers.py index 8b071a77f..b83537652 100644 --- a/SpiffWorkflow/bpmn/parser/task_parsers.py +++ b/SpiffWorkflow/bpmn/parser/task_parsers.py @@ -57,25 +57,17 @@ class ExclusiveGatewayParser(TaskParser): appropriately. 
""" - def connect_outgoing(self, outgoing_task, outgoing_task_node, - sequence_flow_node, is_default): + def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default): if is_default: - super(ExclusiveGatewayParser, self).connect_outgoing( - outgoing_task, outgoing_task_node, sequence_flow_node, - is_default) + super(ExclusiveGatewayParser, self).connect_outgoing(outgoing_task, sequence_flow_node, is_default) else: cond = self.parse_condition(sequence_flow_node) if cond is None: raise ValidationException( - 'Non-default exclusive outgoing sequence flow ' - ' without condition', + 'Non-default exclusive outgoing sequence flow without condition', sequence_flow_node, self.filename) - self.task.connect_outgoing_if( - cond, outgoing_task, - sequence_flow_node.get('id'), - sequence_flow_node.get('name', None), - self.parse_documentation(sequence_flow_node)) + self.task.connect_outgoing_if(cond, outgoing_task) def handles_multiple_outgoing(self): return True @@ -121,12 +113,12 @@ class SubprocessParser: raise ValidationException( 'Multiple Start points are not allowed in SubWorkflow Task', node=task_parser.node, - filename=task_parser.filename) + file_name=task_parser.filename) if len(workflow_end_event) == 0: raise ValidationException( 'A SubWorkflow Must contain an End event', node=task_parser.node, - filename=task_parser.filename) + file_name=task_parser.filename) nsmap = DEFAULT_NSMAP.copy() nsmap['camunda'] = "http://camunda.org/schema/1.0/bpmn" @@ -151,14 +143,14 @@ class SubprocessParser: raise ValidationException( 'No "calledElement" attribute for Call Activity.', node=task_parser.node, - filename=task_parser.filename) + file_name=task_parser.filename) parser = task_parser.process_parser.parser.get_process_parser(called_element) if parser is None: raise ValidationException( f"The process '{called_element}' was not found. Did you mean one of the following: " f"{', '.join(task_parser.process_parser.parser.get_process_ids())}?", node=task_parser.node, - filename=task_parser.filename) + file_name=task_parser.filename) return called_element @@ -206,7 +198,7 @@ class ScriptTaskParser(TaskParser): except AssertionError as ae: raise ValidationException( f"Invalid Script Task. No Script Provided. " + str(ae), - node=self.node, filename=self.filename) + node=self.node, file_name=self.filename) class ServiceTaskParser(TaskParser): diff --git a/SpiffWorkflow/bpmn/serializer/BpmnSerializer.py b/SpiffWorkflow/bpmn/serializer/BpmnSerializer.py deleted file mode 100644 index 278734e39..000000000 --- a/SpiffWorkflow/bpmn/serializer/BpmnSerializer.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2020 Matthew Hampton, Dan Funk -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301 USA - -import configparser -from io import BytesIO, TextIOWrapper -from warnings import warn - -from lxml import etree -import zipfile -import os - -from SpiffWorkflow.task import TaskState -from ...bpmn.specs.SubWorkflowTask import SubWorkflowTask -from ...bpmn.workflow import BpmnWorkflow -from ...bpmn.serializer.json import BPMNJSONSerializer -from ..parser.BpmnParser import BpmnParser -from .Packager import Packager - -class BpmnSerializer(BPMNJSONSerializer): - """ - DEPRECATED --- This call remains available only so that folks can deserialize - existing workflows. - The BpmnSerializer class provides support for deserializing a Bpmn Workflow - Spec from a BPMN package. The BPMN package must have been created using the - :class:`SpiffWorkflow.bpmn.serializer.Packager`. - - It will also use the appropriate subclass of BpmnParser, if one is included - in the metadata.ini file. - """ - - def __init_subclass__(cls, **kwargs): - """This throws a deprecation warning on subclassing.""" - warn(f'{cls.__name__} is deprecated. Please use ' - f'bpmn.serializer.workflow.BpmnWorkflowSerializer', - DeprecationWarning, stacklevel=2) - super().__init_subclass__(**kwargs) - - def serialize_workflow(self, workflow, **kwargs): - """ - *** DEPRECATED *** DEPRECATED *** DEPRECATED *** DEPRECATED *** - Serializes the workflow data and task tree. Will also serialize - the Spec if 'include_spec' kwarg is not set to false. - Please use bpmn.serializer.workflow.BpmnWorkflowSerializer for - Serialization. This class remains available only to help transition - to the new Serialization scheme. - """ - """This throws a deprecation warning on initialization.""" - warn(f'{self.__class__.__name__} is deprecated. DO NOT continue to ' - f'use it for serialization. Deserialize your old workflows, then' - f'move to the new serializer for storing. See ' - f'bpmn.serializer.workflow.BpmnWorkflowSerializer', - DeprecationWarning, stacklevel=2) - assert isinstance(workflow, BpmnWorkflow) - include_spec = kwargs.get('include_spec',True) - return super().serialize_workflow(workflow, include_spec=include_spec) - - def serialize_task(self, task, skip_children=False, **kwargs): - return super().serialize_task(task, - skip_children=skip_children, - allow_subs=True) - - def deserialize_workflow(self, s_state, workflow_spec=None, - read_only=False, **kwargs): - - return super().deserialize_workflow(s_state, - wf_class=BpmnWorkflow, - wf_spec=workflow_spec, - read_only=read_only, - **kwargs) - - def _deserialize_task_children(self, task, s_state): - """Reverses the internal process that will merge children from a - sub-workflow in the top level workflow. This copies the states - back into the sub-workflow after generating it from the base spec""" - if not isinstance(task.task_spec, SubWorkflowTask): - return super()._deserialize_task_children(task, s_state) - - sub_workflow = task.task_spec.create_sub_workflow(task) - children = [] - for c in s_state['children']: - # One child belongs to the parent workflow (The path back - # out of the subworkflow) the other children belong to the - # sub-workflow. 
- - # We need to determine if we are still in the same workflow, - # Ideally we can just check: if c['workflow_name'] == sub_workflow.name - # however, we need to support deserialization of workflows without this - # critical property, at least temporarily, so people can migrate. - if 'workflow_name' in c: - same_workflow = c['workflow_name'] == sub_workflow.name - else: - same_workflow = sub_workflow.get_tasks_from_spec_name(c['task_spec']) - - if same_workflow: - start_task = self.deserialize_task(sub_workflow, c) - children.append(start_task) - start_task.parent = task.id - sub_workflow.task_tree = start_task - # get a list of tasks in reverse order of change - # our last task should be on the top. - tasks = sub_workflow.get_tasks(TaskState.COMPLETED) - tasks.sort(key=lambda x: x.last_state_change,reverse=True) - if len(tasks)>0: - last_task = tasks[0] - sub_workflow.last_task = last_task - else: - resume_task = self.deserialize_task(task.workflow, c) - resume_task.parent = task.id - children.append(resume_task) - return children - - def deserialize_task(self, workflow, s_state): - assert isinstance(workflow, BpmnWorkflow) - return super().deserialize_task(workflow, s_state) - - def deserialize_workflow_spec(self, s_state, filename=None): - """ - :param s_state: a byte-string with the contents of the packaged - workflow archive, or a file-like object. - - :param filename: the name of the package file. - """ - if isinstance(s_state,dict): - return super().deserialize_workflow_spec(s_state) - if isinstance(s_state,str): - return super().deserialize_workflow_spec(s_state) - if isinstance(s_state, bytes): - s_state = BytesIO(s_state) - - package_zip = zipfile.ZipFile( - s_state, "r", compression=zipfile.ZIP_DEFLATED) - config = configparser.ConfigParser() - ini_fp = TextIOWrapper( - package_zip.open(Packager.METADATA_FILE), encoding="UTF-8") - try: - config.read_file(ini_fp) - finally: - ini_fp.close() - - parser_class = BpmnParser - parser_class_module = config.get( - 'MetaData', 'parser_class_module', fallback=None) - - if parser_class_module: - mod = __import__(parser_class_module, fromlist=[ - config.get('MetaData', 'parser_class')]) - parser_class = getattr(mod, config.get('MetaData', 'parser_class')) - - parser = parser_class() - - for info in package_zip.infolist(): - parts = os.path.split(info.filename) - if (len(parts) == 2 and not parts[0] and parts[1].lower().endswith('.bpmn')): - # It is in the root of the ZIP and is a BPMN file - bpmn_fp = package_zip.open(info) - try: - bpmn = etree.parse(bpmn_fp) - finally: - bpmn_fp.close() - - parser.add_bpmn_xml(bpmn, filename='%s:%s' % (filename, info.filename)) - spec_name = config.get('MetaData', 'entry_point_process') - return parser.get_spec(spec_name) diff --git a/SpiffWorkflow/bpmn/serializer/CompactWorkflowSerializer.py b/SpiffWorkflow/bpmn/serializer/CompactWorkflowSerializer.py deleted file mode 100644 index 9ed543fbf..000000000 --- a/SpiffWorkflow/bpmn/serializer/CompactWorkflowSerializer.py +++ /dev/null @@ -1,483 +0,0 @@ -# -*- coding: utf-8 -*- -from builtins import str -from builtins import hex -from builtins import range -from builtins import object -# Copyright (C) 2012 Matthew Hampton -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301 USA - -from collections import deque -import json -from ...task import TaskState -from ...specs.Subworkflow import SubWorkflow -from ...serializer.base import Serializer -from ..workflow import BpmnWorkflow - - -class UnrecoverableWorkflowChange(Exception): - """ - This is thrown if the workflow cannot be restored because the workflow spec - has changed, and the identified transitions no longer exist. - """ - pass - - -class _RouteNode(object): - """ - Private helper class - """ - - def __init__(self, task_spec, outgoing_route_node=None): - self.task_spec = task_spec - self.outgoing = [outgoing_route_node] if outgoing_route_node else [] - self.state = None - - def get_outgoing_by_spec(self, task_spec): - m = [r for r in self.outgoing if r.task_spec == task_spec] - return m[0] if m else None - - def to_list(self): - result = [] - n = self - while n.outgoing: - assert len( - n.outgoing) == 1, "to_list(..) cannot be called after a merge" - result.append(n.task_spec) - n = n.outgoing[0] - result.append(n.task_spec) - return result - - def contains(self, other_route): - if isinstance(other_route, list): - return self.to_list()[0:len(other_route)] == other_route - - # This only works before merging - assert len(other_route.outgoing) <= 1,\ - "contains(..) cannot be called after a merge" - assert len(self.outgoing) <= 1,\ - "contains(..) 
cannot be called after a merge" - - if other_route.task_spec == self.task_spec: - if other_route.outgoing and self.outgoing: - return self.outgoing[0].contains(other_route.outgoing[0]) - elif self.outgoing: - return True - elif not other_route.outgoing: - return True - return False - - -class _BpmnProcessSpecState(object): - - """ - Private helper class - """ - - def __init__(self, spec): - self.spec = spec - self.route = None - - def get_path_to_transition(self, transition, state, workflow_parents, - taken_routes=None): - # find a route passing through each task: - route = [self.spec.start] - route_to_parent_complete = None - for task_name in workflow_parents: - route = self._breadth_first_task_search(str(task_name), route) - if route is None: - raise UnrecoverableWorkflowChange( - 'No path found for route \'%s\'' % transition) - route_to_parent_complete = route + [route[-1].outputs[0]] - route = route + [route[-1].spec.start] - route = self._breadth_first_transition_search( - transition, route, taken_routes=taken_routes) - if route is None: - raise UnrecoverableWorkflowChange( - 'No path found for route \'%s\'' % transition) - outgoing_route_node = None - for spec in reversed(route): - outgoing_route_node = _RouteNode(spec, outgoing_route_node) - outgoing_route_node.state = state - return outgoing_route_node, route_to_parent_complete - - def add_route(self, outgoing_route_node): - if self.route: - self._merge_routes(self.route, outgoing_route_node) - else: - self.route = outgoing_route_node - - def dump(self): - print(self.get_dump()) - - def get_dump(self): - def recursive_dump(route_node, indent, verbose=False): - - task_spec = route_node.task_spec - dump = '%s (%s:%s)' % ( - task_spec.name, - task_spec.__class__.__name__, - hex(id(task_spec))) + '\n' - if verbose: - if task_spec.inputs: - dump += indent + '- IN: ' + \ - ','.join(['%s (%s)' % (t.name, hex(id(t))) - for t in task_spec.inputs]) + '\n' - if task_spec.outputs: - dump += indent + '- OUT: ' + \ - ','.join(['%s (%s)' % (t.name, hex(id(t))) - for t in task_spec.outputs]) + '\n' - - for i, t in enumerate(route_node.outgoing): - dump += indent + ' --> ' + \ - recursive_dump( - t, indent + ( - ' | ' if i + 1 < len(route_node.outgoing) - else ' ')) - return dump - - dump = recursive_dump(self.route, '') - return dump - - def go(self, workflow): - leaf_tasks = [] - self._go(workflow.task_tree.children[0], self.route, leaf_tasks) - for task in sorted( - leaf_tasks, - key=lambda t: 0 if getattr( - t, '_bpmn_load_target_state', TaskState.READY) == TaskState.READY - else 1): - task.task_spec._update(task) - task._inherit_data() - if hasattr(task, '_bpmn_load_target_state'): - delattr(task, '_bpmn_load_target_state') - - def _go(self, task, route_node, leaf_tasks): - assert task.task_spec == route_node.task_spec - if not route_node.outgoing: - assert route_node.state is not None - setattr(task, '_bpmn_load_target_state', route_node.state) - leaf_tasks.append(task) - else: - if not task._is_finished(): - if (issubclass(task.task_spec.__class__, SubWorkflow) and - task.task_spec.spec.start in - [o.task_spec for o in route_node.outgoing]): - self._go_in_to_subworkflow( - task, [n.task_spec for n in route_node.outgoing]) - else: - self._complete_task_silent( - task, [n.task_spec for n in route_node.outgoing]) - for n in route_node.outgoing: - matching_child = [ - t for t in task.children if t.task_spec == n.task_spec] - assert len(matching_child) == 1 - self._go(matching_child[0], n, leaf_tasks) - - def _complete_task_silent(self, task, 
target_children_specs): - # This method simulates the completing of a task, but without hooks - # being called, and targeting a specific subset of the children - if task._is_finished(): - return - task._set_state(TaskState.COMPLETED) - - task.children = [] - for task_spec in target_children_specs: - task._add_child(task_spec) - - def _go_in_to_subworkflow(self, my_task, target_children_specs): - # This method simulates the entering of a subworkflow, but without - # hooks being called, and targeting a specific subset of the entry - # tasks in the subworkflow. It creates the new workflow instance and - # merges it in to the tree This is based on - # SubWorkflow._on_ready_before_hook(..) - if my_task._is_finished(): - return - - subworkflow = my_task.task_spec._create_subworkflow(my_task) - subworkflow.completed_event.connect( - my_task.task_spec._on_subworkflow_completed, my_task) - - # Create the children (these are the tasks that follow the subworkflow, - # on completion: - my_task.children = [] - my_task._sync_children(my_task.task_spec.outputs, TaskState.FUTURE) - for t in my_task.children: - t.task_spec._predict(t) - - # Integrate the tree of the subworkflow into the tree of this workflow. - for child in subworkflow.task_tree.children: - if child.task_spec in target_children_specs: - my_task.children.insert(0, child) - child.parent = my_task - - my_task._set_internal_data(subworkflow=subworkflow) - - my_task._set_state(TaskState.COMPLETED) - - def _merge_routes(self, target, src): - assert target.task_spec == src.task_spec - for out_route in src.outgoing: - target_out_route = target.get_outgoing_by_spec(out_route.task_spec) - if target_out_route: - self._merge_routes(target_out_route, out_route) - else: - target.outgoing.append(out_route) - - def _breadth_first_transition_search(self, transition_id, starting_route, - taken_routes=None): - return self._breadth_first_search(starting_route, - transition_id=transition_id, - taken_routes=taken_routes) - - def _breadth_first_task_search(self, task_name, starting_route): - return self._breadth_first_search(starting_route, task_name=task_name) - - def _breadth_first_search(self, starting_route, task_name=None, - transition_id=None, taken_routes=None): - q = deque() - done = set() - q.append(starting_route) - while q: - route = q.popleft() - if not route[-1] == starting_route[-1]: - if task_name and route[-1].name == task_name: - return route - if (transition_id and - hasattr(route[-1], 'has_outgoing_sequence_flow') and - route[-1].has_outgoing_sequence_flow(transition_id)): - spec = route[-1].get_outgoing_sequence_flow_by_id( - transition_id).target_task_spec - if taken_routes: - final_route = route + [spec] - for taken in taken_routes: - t = taken.to_list() if not isinstance( - taken, list) else taken - if final_route[0:len(t)] == t: - spec = None - break - if spec: - route.append(spec) - return route - for child in route[-1].outputs: - new_route = route + [child] - if len(new_route) > 10000: - raise ValueError("Maximum looping limit exceeded " - "searching for path to % s" % - (task_name or transition_id)) - new_route_r = tuple(new_route) - if new_route_r not in done: - done.add(new_route_r) - q.append(new_route) - return None - - -class CompactWorkflowSerializer(Serializer): - """ - This class provides an implementation of serialize_workflow and - deserialize_workflow that produces a compact representation of the workflow - state, that can be stored in a database column or reasonably small size. 
- - It records ONLY enough information to identify the transition leading in to - each WAITING or READY state, along with the state of that task. This is - generally enough to resurrect a running BPMN workflow instance, with some - limitations. - - Limitations: - 1. The compact representation does not include any workflow or task data. - It is the responsibility of the calling application to record whatever - data is relevant to it, and set it on the restored workflow. - 2. The restoring process will not produce exactly the same workflow tree - - it finds the SHORTEST route to the saved READY and WAITING tasks, not - the route that was actually taken. This means that the tree cannot be - interrogated for historical information about the workflow. However, the - workflow does follow the same logic paths as would have been followed by - the original workflow. - """ - - STATE_SPEC_VERSION = 1 - - def serialize_workflow_spec(self, wf_spec, **kwargs): - raise NotImplementedError( - "The CompactWorkflowSerializer only supports " - " workflow serialization.") - - def deserialize_workflow_spec(self, s_state, **kwargs): - raise NotImplementedError( - "The CompactWorkflowSerializer only supports " - "workflow serialization.") - - def serialize_workflow(self, workflow, include_spec=False, **kwargs): - """ - :param workflow: the workflow instance to serialize - - :param include_spec: Always set to False (The CompactWorkflowSerializer - only supports workflow serialization) - """ - if include_spec: - raise NotImplementedError( - 'Including the spec serialization with the workflow state ' - 'is not implemented.') - return self._get_workflow_state(workflow) - - def deserialize_workflow(self, s_state, workflow_spec=None, - read_only=False, **kwargs): - """ - :param s_state: the state of the workflow as returned by - serialize_workflow - - :param workflow_spec: the Workflow Spec of the workflow - (CompactWorkflowSerializer only supports workflow serialization) - - :param read_only: (Optional) True if the workflow should be restored in - READ ONLY mode - - NB: Additional kwargs passed to the deserialize_workflow method will be - passed to the new_workflow method. - """ - if workflow_spec is None: - raise NotImplementedError( - 'Including the spec serialization with the workflow state is ' - ' not implemented. A \'workflow_spec\' must ' - 'be provided.') - workflow = self.new_workflow( - workflow_spec, read_only=read_only, **kwargs) - self._restore_workflow_state(workflow, s_state) - return workflow - - def new_workflow(self, workflow_spec, read_only=False, **kwargs): - """ - Create a new workflow instance from the given spec and arguments. 
- - :param workflow_spec: the workflow spec to use - - :param read_only: this should be in read only mode - - :param kwargs: Any extra kwargs passed to the deserialize_workflow - method will be passed through here - """ - return BpmnWorkflow(workflow_spec, read_only=read_only, **kwargs) - - def _get_workflow_state(self, workflow): - active_tasks = workflow.get_tasks(state=(TaskState.READY | TaskState.WAITING)) - states = [] - - for task in active_tasks: - parent_task_spec = task.parent.task_spec - transition = parent_task_spec.get_outgoing_sequence_flow_by_spec( - task.task_spec).id - w = task.workflow - workflow_parents = [] - while w.outer_workflow and w.outer_workflow != w: - workflow_parents.append(w.name) - w = w.outer_workflow - state = ("W" if task.state == TaskState.WAITING else "R") - states.append( - [transition, list(reversed(workflow_parents)), state]) - - compacted_states = [] - for state in sorted(states, - key=lambda s: ",".join([s[0], - s[2], - (':'.join(s[1]))])): - if state[-1] == 'R': - state.pop() - if state[-1] == []: - state.pop() - if len(state) == 1: - state = state[0] - compacted_states.append(state) - - state_list = compacted_states + [self.STATE_SPEC_VERSION] - state_s = json.dumps(state_list)[1:-1] - return state_s - - def _restore_workflow_state(self, workflow, state): - state_list = json.loads('[' + state + ']') - - self._check_spec_version(state_list[-1]) - - s = _BpmnProcessSpecState(workflow.spec) - - routes = [] - for state in state_list[:-1]: - if isinstance(state, str) or type(state).__name__ == 'str': - state = [str(state)] - transition = state[0] - workflow_parents = state[1] if len(state) > 1 else [] - state = (TaskState.WAITING if len(state) > - 2 and state[2] == 'W' else TaskState.READY) - - route, route_to_parent_complete = s.get_path_to_transition( - transition, state, workflow_parents) - routes.append( - (route, route_to_parent_complete, transition, state, - workflow_parents)) - - retry = True - retry_count = 0 - while (retry): - if retry_count > 100: - raise ValueError( - 'Maximum retry limit exceeded searching for unique paths') - retry = False - - for i in range(len(routes)): - (route, route_to_parent_complete, transition, state, - workflow_parents) = routes[i] - - for j in range(len(routes)): - if i == j: - continue - other_route = routes[j][0] - route_to_parent_complete = routes[j][1] - if route.contains(other_route) or ( - route_to_parent_complete and route.contains( - route_to_parent_complete)): - taken_routes = [r for r in routes if r[0] != route] - taken_routes = [r for r in [r[0] for r - in taken_routes] + - [r[1] for r in taken_routes] if r] - (route, - route_to_parent_complete) = s.get_path_to_transition( - transition, state, workflow_parents, - taken_routes=taken_routes) - for r in taken_routes: - assert not route.contains(r) - routes[ - i] = (route, route_to_parent_complete, transition, - state, workflow_parents) - retry = True - retry_count += 1 - break - if retry: - break - - for r in routes: - s.add_route(r[0]) - - workflow._busy_with_restore = True - try: - if len(state_list) <= 1: - workflow.cancel(success=True) - return - s.go(workflow) - finally: - workflow._busy_with_restore = False - - def _check_spec_version(self, v): - # We only have one version right now: - assert v == self.STATE_SPEC_VERSION diff --git a/SpiffWorkflow/bpmn/serializer/Packager.py b/SpiffWorkflow/bpmn/serializer/Packager.py deleted file mode 100644 index f2f6944d9..000000000 --- a/SpiffWorkflow/bpmn/serializer/Packager.py +++ /dev/null @@ -1,548 +0,0 @@ 
-# -*- coding: utf-8 -*- -from builtins import object -# Copyright (C) 2012 Matthew Hampton -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301 USA - -import os -import configparser -import glob -import hashlib -import inspect -import zipfile -from io import StringIO -from optparse import OptionParser, OptionGroup -from ..parser.BpmnParser import BpmnParser -from ..parser.ValidationException import ValidationException -from ..parser.util import xpath_eval, one -from lxml import etree -SIGNAVIO_NS = 'http://www.signavio.com' -CONFIG_SECTION_NAME = "Packager Options" - - -def md5hash(data): - if not isinstance(data, bytes): - data = data.encode('UTF-8') - - return hashlib.md5(data).hexdigest().lower() - - -class Packager(object): - """ - The Packager class pre-parses a set of BPMN files (together with their SVG - representation), validates the contents and then produces a ZIP-based - archive containing the pre-parsed BPMN and SVG files, the source files (for - reference) and a metadata.ini file that contains enough information to - create a BpmnProcessSpec instance from the archive (e.g. the ID of the - entry point process). - - This class can be extended and any public method overridden to do - additional validation / parsing or to package additional metadata. - - Extension point: - - PARSER_CLASS: provide the class that should be used to parse the BPMN - files. The fully-qualified name will be included in the metadata.ini file, - so that the BpmnSerializer can instantiate the right parser to deal with - the package. - - Editor hooks: package_for_editor_<editor name>(self, spec, filename): - Called once for each BPMN file. Should add any additional files to the - archive. - """ - - METADATA_FILE = "metadata.ini" - MANIFEST_FILE = "manifest.ini" - PARSER_CLASS = BpmnParser - - def __init__(self, package_file, entry_point_process, meta_data=None, - editor=None): - """ - Constructor. - - :param package_file: a file-like object where the contents of the - package must be written to - - :param entry_point_process: the name or ID of the entry point process - - :param meta_data: A list of meta-data tuples to include in the - metadata.ini file (in addition to the standard ones) - - :param editor: The name of the editor used to create the source BPMN / - SVG files. This activates additional hook method calls. (optional) - """ - self.package_file = package_file - self.entry_point_process = entry_point_process - self.parser = self.PARSER_CLASS() - self.meta_data = meta_data or [] - self.input_files = [] - self.input_path_prefix = None - self.editor = editor - self.manifest = {} - - def add_bpmn_file(self, filename): - """ - Add the given BPMN filename to the packager's set. - """ - self.add_bpmn_files([filename]) - - def add_bpmn_files_by_glob(self, g): - """ - Add all filenames matching the provided pattern (e.g. 
*.bpmn) to the - packager's set. - """ - self.add_bpmn_files(glob.glob(g)) - - def add_bpmn_files(self, filenames): - """ - Add all filenames in the given list to the packager's set. - """ - self.input_files += filenames - - def create_package(self): - """ - Creates the package, writing the data out to the provided file-like - object. - """ - - # Check that all files exist (and calculate the longest shared path - # prefix): - self.input_path_prefix = None - for filename in self.input_files: - if not os.path.isfile(filename): - raise ValueError( - '%s does not exist or is not a file' % filename) - if self.input_path_prefix: - full = os.path.abspath(os.path.dirname(filename)) - while not (full.startswith(self.input_path_prefix) and - self.input_path_prefix): - self.input_path_prefix = self.input_path_prefix[:-1] - else: - self.input_path_prefix = os.path.abspath( - os.path.dirname(filename)) - - # Parse all of the XML: - self.bpmn = {} - for filename in self.input_files: - bpmn = etree.parse(filename) - self.bpmn[os.path.abspath(filename)] = bpmn - - # Now run through pre-parsing and validation: - for filename, bpmn in list(self.bpmn.items()): - bpmn = self.pre_parse_and_validate(bpmn, filename) - self.bpmn[os.path.abspath(filename)] = bpmn - - # Now check that we can parse it fine: - for filename, bpmn in list(self.bpmn.items()): - self.parser.add_bpmn_xml(bpmn, filename=filename) - # at this point, we have a item in self.wf_spec.get_specs_depth_first() - # that has a filename of None and a bpmn that needs to be added to the - # list below in for spec. - self.wf_spec = self.parser.get_spec(self.entry_point_process) - - # Now package everything: - self.package_zip = zipfile.ZipFile( - self.package_file, "w", compression=zipfile.ZIP_DEFLATED) - - done_files = set() - - for spec in self.wf_spec.get_specs_depth_first(): - filename = spec.file - if filename is None: - # This is for when we are doing a subworkflow, and it - # creates something in the bpmn spec list, but it really has - # no file. In this case, it is safe to skip the add to the - # zip file. - continue - if filename not in done_files: - done_files.add(filename) - - bpmn = self.bpmn[os.path.abspath(filename)] - self.write_to_package_zip( - "%s.bpmn" % spec.name, etree.tostring(bpmn.getroot())) - - self.write_to_package_zip( - "src/" + self._get_zip_path(filename), filename) - - self._call_editor_hook('package_for_editor', spec, filename) - - self.write_meta_data() - self.write_manifest() - - self.package_zip.close() - - def write_file_to_package_zip(self, filename, src_filename): - """ - Writes a local file in to the zip file and adds it to the manifest - dictionary - - :param filename: The zip file name - - :param src_filename: the local file name - """ - f = open(src_filename) - with f: - data = f.read() - self.manifest[filename] = md5hash(data) - self.package_zip.write(src_filename, filename) - - def write_to_package_zip(self, filename, data): - """ - Writes data to the zip file and adds it to the manifest dictionary - - :param filename: The zip file name - - :param data: the data - """ - self.manifest[filename] = md5hash(data) - self.package_zip.writestr(filename, data) - - def write_manifest(self): - """ - Write the manifest content to the zip file. It must be a predictable - order. 
- """ - config = configparser.ConfigParser() - - config.add_section('Manifest') - - for f in sorted(self.manifest.keys()): - config.set('Manifest', f.replace( - '\\', '/').lower(), self.manifest[f]) - - ini = StringIO() - config.write(ini) - self.manifest_data = ini.getvalue() - self.package_zip.writestr(self.MANIFEST_FILE, self.manifest_data) - - def pre_parse_and_validate(self, bpmn, filename): - """ - A subclass can override this method to provide additional parseing or - validation. It should call the parent method first. - - :param bpmn: an lxml tree of the bpmn content - - :param filename: the source file name - - This must return the updated bpmn object (or a replacement) - """ - bpmn = self._call_editor_hook( - 'pre_parse_and_validate', bpmn, filename) or bpmn - - return bpmn - - def pre_parse_and_validate_signavio(self, bpmn, filename): - """ - This is the Signavio specific editor hook for pre-parsing and - validation. - - A subclass can override this method to provide additional parseing or - validation. It should call the parent method first. - - :param bpmn: an lxml tree of the bpmn content - - :param filename: the source file name - - This must return the updated bpmn object (or a replacement) - """ - self._check_for_disconnected_boundary_events_signavio(bpmn, filename) - self._fix_call_activities_signavio(bpmn, filename) - return bpmn - - def _check_for_disconnected_boundary_events_signavio(self, bpmn, filename): - # signavio sometimes disconnects a BoundaryEvent from it's owning task - # They then show up as intermediateCatchEvents without any incoming - # sequence flows - xpath = xpath_eval(bpmn) - for catch_event in xpath('.//bpmn:intermediateCatchEvent'): - incoming = xpath( - './/bpmn:sequenceFlow[@targetRef="%s"]' % - catch_event.get('id')) - if not incoming: - raise ValidationException( - 'Intermediate Catch Event has no incoming sequences. ' - 'This might be a Boundary Event that has been ' - 'disconnected.', - node=catch_event, filename=filename) - - def _fix_call_activities_signavio(self, bpmn, filename): - """ - Signavio produces slightly invalid BPMN for call activity nodes... It - is supposed to put a reference to the id of the called process in to - the calledElement attribute. Instead it stores a string (which is the - name of the process - not its ID, in our interpretation) in an - extension tag. - - This code gets the name of the 'subprocess reference', finds a process - with a matching name, and sets the calledElement attribute to the id of - the process. - """ - for node in xpath_eval(bpmn)(".//bpmn:callActivity"): - calledElement = node.get('calledElement', None) - if not calledElement: - signavioMetaData = xpath_eval(node, extra_ns={ - 'signavio': SIGNAVIO_NS})( - './/signavio:signavioMetaData[@metaKey="entry"]') - if not signavioMetaData: - raise ValidationException( - 'No Signavio "Subprocess reference" specified.', - node=node, filename=filename) - subprocess_reference = one(signavioMetaData).get('metaValue') - matches = [] - for b in list(self.bpmn.values()): - for p in xpath_eval(b)(".//bpmn:process"): - if (p.get('name', p.get('id', None)) == - subprocess_reference): - matches.append(p) - if not matches: - raise ValidationException( - "No matching process definition found for '%s'." % - subprocess_reference, node=node, filename=filename) - if len(matches) != 1: - raise ValidationException( - "More than one matching process definition " - " found for '%s'." 
% subprocess_reference, node=node, - filename=filename) - - node.set('calledElement', matches[0].get('id')) - - def _call_editor_hook(self, hook, *args, **kwargs): - if self.editor: - hook_func = getattr(self, "%s_%s" % (hook, self.editor), None) - if hook_func: - return hook_func(*args, **kwargs) - return None - - def package_for_editor_signavio(self, spec, filename): - """ - Adds the SVG files to the archive for this BPMN file. - """ - signavio_file = filename[:-len('.bpmn20.xml')] + '.signavio.xml' - if os.path.exists(signavio_file): - self.write_file_to_package_zip( - "src/" + self._get_zip_path(signavio_file), signavio_file) - - f = open(signavio_file, 'r') - try: - signavio_tree = etree.parse(f) - finally: - f.close() - svg_node = one(signavio_tree.findall('.//svg-representation')) - self.write_to_package_zip("%s.svg" % spec.name, svg_node.text) - - def write_meta_data(self): - """ - Writes the metadata.ini file to the archive. - """ - config = configparser.ConfigParser() - - config.add_section('MetaData') - config.set('MetaData', 'entry_point_process', self.wf_spec.name) - if self.editor: - config.set('MetaData', 'editor', self.editor) - - for k, v in self.meta_data: - config.set('MetaData', k, v) - - if not self.PARSER_CLASS == BpmnParser: - config.set('MetaData', 'parser_class_module', - inspect.getmodule(self.PARSER_CLASS).__name__) - config.set('MetaData', 'parser_class', self.PARSER_CLASS.__name__) - - ini = StringIO() - config.write(ini) - self.write_to_package_zip(self.METADATA_FILE, ini.getvalue()) - - def _get_zip_path(self, filename): - p = os.path.abspath(filename)[ - len(self.input_path_prefix):].replace(os.path.sep, '/') - while p.startswith('/'): - p = p[1:] - return p - - @classmethod - def get_version(cls): - try: - import pkg_resources # part of setuptools - version = pkg_resources.require("SpiffWorkflow")[0].version - except Exception: - version = 'DEV' - return version - - @classmethod - def create_option_parser(cls): - """ - Override in subclass if required. - """ - return OptionParser( - usage=("%prog [options] -o -p " - " "), - version="SpiffWorkflow BPMN Packager %s" % (cls.get_version())) - - @classmethod - def add_main_options(cls, parser): - """ - Override in subclass if required. - """ - parser.add_option("-o", "--output", dest="package_file", - help="create the BPMN package in the specified file") - parser.add_option("-p", "--process", dest="entry_point_process", - help="specify the entry point process") - parser.add_option("-c", "--config-file", dest="config_file", - help="specify a config file to use") - parser.add_option( - "-i", "--initialise-config-file", action="store_true", - dest="init_config_file", default=False, - help="create a new config file from the specified options") - - group = OptionGroup(parser, "BPMN Editor Options", - "These options are not required, but may be " - " provided to activate special features of " - "supported BPMN editors.") - group.add_option("--editor", dest="editor", - help="editors with special support: signavio") - parser.add_option_group(group) - - @classmethod - def add_additional_options(cls, parser): - """ - Override in subclass if required. 
- """ - group = OptionGroup(parser, "Target Engine Options", - "These options are not required, but may be " - "provided if a specific " - "BPMN application engine is targeted.") - group.add_option("-e", "--target-engine", dest="target_engine", - help="target the specified BPMN application engine") - group.add_option( - "-t", "--target-version", dest="target_engine_version", - help="target the specified version of the BPMN application engine") - parser.add_option_group(group) - - @classmethod - def check_args(cls, config, options, args, parser, package_file=None): - """ - Override in subclass if required. - """ - if not args: - parser.error("no input files specified") - if not (package_file or options.package_file): - parser.error("no package file specified") - if not options.entry_point_process: - parser.error("no entry point process specified") - - @classmethod - def merge_options_and_config(cls, config, options, args): - """ - Override in subclass if required. - """ - if args: - config.set(CONFIG_SECTION_NAME, 'input_files', ','.join(args)) - elif config.has_option(CONFIG_SECTION_NAME, 'input_files'): - for i in config.get(CONFIG_SECTION_NAME, 'input_files').split(','): - if not os.path.isabs(i): - i = os.path.abspath( - os.path.join(os.path.dirname(options.config_file), i)) - args.append(i) - - cls.merge_option_and_config_str('package_file', config, options) - cls.merge_option_and_config_str('entry_point_process', config, options) - cls.merge_option_and_config_str('target_engine', config, options) - cls.merge_option_and_config_str( - 'target_engine_version', config, options) - cls.merge_option_and_config_str('editor', config, options) - - @classmethod - def merge_option_and_config_str(cls, option_name, config, options): - """ - Utility method to merge an option and config, with the option taking " - precedence - """ - - opt = getattr(options, option_name, None) - if opt: - config.set(CONFIG_SECTION_NAME, option_name, opt) - elif config.has_option(CONFIG_SECTION_NAME, option_name): - setattr(options, option_name, config.get( - CONFIG_SECTION_NAME, option_name)) - - @classmethod - def create_meta_data(cls, options, args, parser): - """ - Override in subclass if required. 
- """ - meta_data = [] - meta_data.append(('spiff_version', cls.get_version())) - if options.target_engine: - meta_data.append(('target_engine', options.target_engine)) - if options.target_engine: - meta_data.append( - ('target_engine_version', options.target_engine_version)) - return meta_data - - @classmethod - def main(cls, argv=None, package_file=None): - parser = cls.create_option_parser() - - cls.add_main_options(parser) - - cls.add_additional_options(parser) - - (options, args) = parser.parse_args(args=argv) - - config = configparser.ConfigParser() - if options.config_file: - config.read(options.config_file) - if not config.has_section(CONFIG_SECTION_NAME): - config.add_section(CONFIG_SECTION_NAME) - - cls.merge_options_and_config(config, options, args) - if options.init_config_file: - if not options.config_file: - parser.error( - "no config file specified - cannot initialise config file") - f = open(options.config_file, "w") - with f: - config.write(f) - return - - cls.check_args(config, options, args, parser, package_file) - - meta_data = cls.create_meta_data(options, args, parser) - - packager = cls(package_file=package_file or options.package_file, - entry_point_process=options.entry_point_process, - meta_data=meta_data, editor=options.editor) - for a in args: - packager.add_bpmn_files_by_glob(a) - packager.create_package() - - return packager - - -def main(packager_class=None): - """ - :param packager_class: The Packager class to use. Default: Packager. - """ - - if not packager_class: - packager_class = Packager - - packager_class.main() - - -if __name__ == '__main__': - main() diff --git a/SpiffWorkflow/bpmn/serializer/bpmn_converters.py b/SpiffWorkflow/bpmn/serializer/bpmn_converters.py index 9fa80ad61..5d604fa46 100644 --- a/SpiffWorkflow/bpmn/serializer/bpmn_converters.py +++ b/SpiffWorkflow/bpmn/serializer/bpmn_converters.py @@ -12,7 +12,7 @@ from ..specs.events.event_definitions import TimerEventDefinition, CycleTimerEve from ..specs.events.event_definitions import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition from ..specs.events.event_definitions import CorrelationProperty, NamedEventDefinition -from ..specs.BpmnSpecMixin import BpmnSpecMixin, SequenceFlow +from ..specs.BpmnSpecMixin import BpmnSpecMixin from ...operators import Attrib, PathAttrib @@ -100,7 +100,6 @@ class BpmnTaskSpecConverter(DictionaryConverter): partial(self.event_defintion_from_dict, event_definition) ) - self.register(SequenceFlow, self.sequence_flow_to_dict, self.sequence_flow_from_dict) self.register(Attrib, self.attrib_to_dict, partial(self.attrib_from_dict, Attrib)) self.register(PathAttrib, self.attrib_to_dict, partial(self.attrib_from_dict, PathAttrib)) self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict) @@ -160,12 +159,6 @@ class BpmnTaskSpecConverter(DictionaryConverter): 'documentation': spec.documentation, 'loopTask': spec.loopTask, 'position': spec.position, - 'outgoing_sequence_flows': dict( - (k, self.convert(v)) for k, v in spec.outgoing_sequence_flows.items() - ), - 'outgoing_sequence_flows_by_id': dict( - (k, self.convert(v)) for k, v in spec.outgoing_sequence_flows_by_id.items() - ), 'data_input_associations': [ self.convert(obj) for obj in spec.data_input_associations ], 'data_output_associations': [ self.convert(obj) for obj in spec.data_output_associations ], } @@ -224,8 +217,6 @@ class BpmnTaskSpecConverter(DictionaryConverter): spec.documentation = dct.pop('documentation', None) 
spec.lane = dct.pop('lane', None) spec.loopTask = dct.pop('loopTask', False) - spec.outgoing_sequence_flows = self.restore(dct.pop('outgoing_sequence_flows', {})) - spec.outgoing_sequence_flows_by_id = self.restore(dct.pop('outgoing_sequence_flows_by_id', {})) spec.data_input_associations = self.restore(dct.pop('data_input_associations', [])) spec.data_output_associations = self.restore(dct.pop('data_output_associations', [])) @@ -283,17 +274,6 @@ class BpmnTaskSpecConverter(DictionaryConverter): event_definition.external = external return event_definition - def sequence_flow_to_dict(self, flow): - return { - 'id': flow.id, - 'name': flow.name, - 'documentation': flow.documentation, - 'target_task_spec': flow.target_task_spec.name - } - - def sequence_flow_from_dict(self, dct): - return SequenceFlow(**dct) - def attrib_to_dict(self, attrib): return { 'name': attrib.name } diff --git a/SpiffWorkflow/bpmn/serializer/dict.py b/SpiffWorkflow/bpmn/serializer/dict.py deleted file mode 100644 index 237b21a10..000000000 --- a/SpiffWorkflow/bpmn/serializer/dict.py +++ /dev/null @@ -1,259 +0,0 @@ -# -*- coding: utf-8 -*- - - -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301 USA - -from ...camunda.specs.UserTask import UserTask -from ...dmn.engine.DMNEngine import DMNEngine -from ...dmn.specs.BusinessRuleTask import BusinessRuleTask -from ...dmn.specs.model import DecisionTable -from ...serializer.dict import DictionarySerializer -from ...util.impl import get_class -from ..specs.BpmnSpecMixin import SequenceFlow -from ..specs.ExclusiveGateway import ExclusiveGateway -from ..specs.MultiInstanceTask import MultiInstanceTask -from ..specs.ScriptTask import ScriptTask -from ..specs.SubWorkflowTask import SubWorkflowTask - - -class BPMNDictionarySerializer(DictionarySerializer): - - def serialize_task_spec(self, spec): - s_state = super().serialize_task_spec(spec) - - if hasattr(spec,'documentation'): - s_state['documentation'] = spec.documentation - if hasattr(spec,'extensions'): - s_state['extensions'] = self.serialize_dict(spec.extensions) - if hasattr(spec,'lane'): - s_state['lane'] = spec.lane - - if hasattr(spec,'outgoing_sequence_flows'): - s_state['outgoing_sequence_flows'] = {x:spec.outgoing_sequence_flows[x].serialize() for x in - spec.outgoing_sequence_flows.keys()} - s_state['outgoing_sequence_flows_by_id'] = {x:spec.outgoing_sequence_flows_by_id[x].serialize() for x in - spec.outgoing_sequence_flows_by_id.keys()} - - # Note: Events are not serialized; this is documented in - # the TaskSpec API docs. - - return s_state - - def deserialize_task_spec(self, wf_spec, s_state, spec): - spec = super().deserialize_task_spec(wf_spec, s_state, spec) - # I would use the s_state.get('extensions',{}) inside of the deserialize - # but many tasks have no extensions on them. 
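This dictionary serializer is one of the files deleted outright; the supported path is the BpmnWorkflowSerializer in bpmn/serializer/workflow.py, whose own diff appears later in this patch. A rough round-trip sketch under the new API; configure_workflow_spec_converter and serialize_json are not shown in this patch, so treat them as assumptions:

from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer

spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter()
serializer = BpmnWorkflowSerializer(spec_converter)
# dump = serializer.serialize_json(workflow)    # workflow: a BpmnWorkflow instance
# workflow = serializer.deserialize_json(dump)  # note: the read_only flag is gone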
- if s_state.get('extensions',None) != None: - spec.extensions = self.deserialize_dict(s_state['extensions']) - if 'documentation' in s_state.keys(): - spec.documentation = s_state['documentation'] - - if 'lane' in s_state.keys(): - spec.lane = s_state.get('lane',None) - if s_state.get('outgoing_sequence_flows',None): - spec.outgoing_sequence_flows = s_state.get('outgoing_sequence_flows', {}) - spec.outgoing_sequence_flows_by_id = s_state.get('outgoing_sequence_flows_by_id', {}) - - return spec - - def serialize_exclusive_gateway(self, spec): - s_state = self.serialize_multi_choice(spec) - s_state['default_task_spec'] = spec.default_task_spec - return s_state - - def deserialize_exclusive_gateway(self, wf_spec, s_state): - spec = ExclusiveGateway(wf_spec, s_state['name']) - self.deserialize_multi_choice(wf_spec, s_state, spec=spec) - spec.default_task_spec = s_state['default_task_spec'] - return spec - - def serialize_script_task(self, spec): - s_state = self.serialize_task_spec(spec) - s_state['script'] = spec.script - return s_state - - def deserialize_script_task(self, wf_spec, s_state): - spec = ScriptTask(wf_spec, s_state['name'], s_state['script']) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - - def serialize_subworkflow_task(self, spec): - s_state = self.serialize_task_spec(spec) - s_state['wf_class'] = spec.wf_class.__module__ + "." + spec.wf_class.__name__ - s_state['spec'] = self.serialize_workflow_spec(spec.spec) - return s_state - - def deserialize_subworkflow_task(self, wf_spec, s_state, cls): - spec = cls(wf_spec, s_state['name']) - spec.wf_class = get_class(s_state['wf_class']) - if 'spec_name' in s_state: - s_state['spec'] = self.SPEC_STATES[s_state['spec_name']] - spec.spec = self.deserialize_workflow_spec(s_state['spec']) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - - def serialize_generic_event(self, spec): - s_state = self.serialize_task_spec(spec) - if spec.event_definition: - s_state['event_definition'] = spec.event_definition.serialize() - else: - s_state['event_definition'] = None - return s_state - - def deserialize_generic_event(self, wf_spec, s_state, cls): - if s_state.get('event_definition',None): - evtcls = get_class(s_state['event_definition']['classname']) - event = evtcls.deserialize(s_state['event_definition']) - else: - event = None - spec = cls(wf_spec, s_state['name'], event) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - - def serialize_boundary_event_parent(self, spec): - s_state = self.serialize_task_spec(spec) - s_state['main_child_task_spec'] = spec.main_child_task_spec.id - return s_state - - def deserialize_boundary_event_parent(self, wf_spec, s_state, cls): - - main_child_task_spec = wf_spec.get_task_spec_from_id(s_state['main_child_task_spec']) - spec = cls(wf_spec, s_state['name'], main_child_task_spec) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - - def serialize_boundary_event(self, spec): - s_state = self.serialize_task_spec(spec) - if spec.cancel_activity: - s_state['cancel_activity'] = spec.cancel_activity - else: - s_state['cancel_activity'] = None - if spec.event_definition: - s_state['event_definition'] = spec.event_definition.serialize() - else: - s_state['event_definition'] = None - return s_state - - def deserialize_boundary_event(self, wf_spec, s_state, cls): - cancel_activity = s_state.get('cancel_activity',None) - if s_state['event_definition']: - eventclass = get_class(s_state['event_definition']['classname']) - event 
= eventclass.deserialize(s_state['event_definition']) - else: - event = None - spec = cls(wf_spec, s_state['name'], cancel_activity=cancel_activity,event_definition=event) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - - def serialize_user_task(self, spec): - s_state = self.serialize_task_spec(spec) - s_state['form'] = spec.form - return s_state - - def deserialize_user_task(self, wf_spec, s_state): - spec = UserTask(wf_spec, s_state['name'], s_state['form']) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - - - def serialize_business_rule_task(self, spec): - s_state = self.serialize_task_spec(spec) - dictrep = spec.dmnEngine.decision_table.serialize() - # future - s_state['dmn'] = dictrep - return s_state - - def deserialize_business_rule_task(self, wf_spec, s_state): - dt = DecisionTable(None, None, None) - dt.deserialize(s_state['dmn']) - dmn_engine = DMNEngine(dt) - spec = BusinessRuleTask(wf_spec, s_state['name'], dmn_engine) - self.deserialize_task_spec(wf_spec, s_state, spec=spec) - return spec - - def serialize_multi_instance(self, spec): - s_state = super().serialize_multi_instance(spec) - # here we need to add in all of the things that would get serialized - # for other classes that the MultiInstance could be - - # - if hasattr(spec,'form'): - s_state['form'] = spec.form - - if isinstance(spec,MultiInstanceTask): - s_state['collection'] = self.serialize_arg(spec.collection) - s_state['elementVar'] = self.serialize_arg(spec.elementVar) - s_state['completioncondition'] = self.serialize_arg(spec.completioncondition) - s_state['isSequential'] = self.serialize_arg(spec.isSequential) - s_state['loopTask'] = self.serialize_arg(spec.loopTask) - if (hasattr(spec,'expanded')): - s_state['expanded'] = self.serialize_arg(spec.expanded) - if isinstance(spec,BusinessRuleTask): - br_state = self.serialize_business_rule_task(spec) - s_state['dmn'] = br_state['dmn'] - if isinstance(spec, ScriptTask): - br_state = self.serialize_script_task(spec) - s_state['script'] = br_state['script'] - if isinstance(spec, SubWorkflowTask): - br_state = self.serialize_subworkflow(spec) - s_state['wf_class'] = br_state['wf_class'] - s_state['spec'] = br_state['spec'] - - return s_state - - def deserialize_multi_instance(self, wf_spec, s_state, cls=None): - cls = super().deserialize_multi_instance(wf_spec, s_state, cls) - if isinstance(cls,MultiInstanceTask): - cls.isSequential = self.deserialize_arg(s_state['isSequential']) - cls.loopTask = self.deserialize_arg(s_state['loopTask']) - cls.elementVar = self.deserialize_arg(s_state['elementVar']) - cls.completioncondition = self.deserialize_arg(s_state['completioncondition']) - cls.collection = self.deserialize_arg(s_state['collection']) - if s_state.get('expanded',None): - cls.expanded = self.deserialize_arg(s_state['expanded']) - if isinstance(cls,BusinessRuleTask): - dt = DecisionTable(None,None,None) - dt.deserialize(s_state['dmn']) - dmn_engine = DMNEngine(dt) - cls.dmnEngine=dmn_engine - if isinstance(cls, ScriptTask): - cls.script = s_state['script'] - if isinstance(cls, SubWorkflowTask): - cls.wf_class = get_class(s_state['wf_class']) - cls.spec = self.deserialize_workflow_spec(s_state['spec']) - - if s_state.get('form',None): - cls.form = s_state['form'] - - return cls - - def _deserialize_workflow_spec_task_spec(self, spec, task_spec, name): - if hasattr(task_spec,'outgoing_sequence_flows'): - for entry,value in task_spec.outgoing_sequence_flows.items(): - task_spec.outgoing_sequence_flows[entry] = \ - 
SequenceFlow(value['id'], - value['name'], - value['documentation'], - spec.get_task_spec_from_id(value['target_task_spec'])) - for entry, value in task_spec.outgoing_sequence_flows_by_id.items(): - task_spec.outgoing_sequence_flows_by_id[entry] = \ - SequenceFlow(value['id'], - value['name'], - value['documentation'], - spec.get_task_spec_from_id(value['target_task_spec'])) - super()._deserialize_workflow_spec_task_spec(spec, task_spec, name) - - def _prevtaskclass_bases(self, oldtask): - return (MultiInstanceTask, oldtask) diff --git a/SpiffWorkflow/bpmn/serializer/json.py b/SpiffWorkflow/bpmn/serializer/json.py deleted file mode 100644 index 5b236f9bc..000000000 --- a/SpiffWorkflow/bpmn/serializer/json.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- - -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301 USA -import json -from ..serializer.dict import BPMNDictionarySerializer -from ...camunda.specs.UserTask import Form -from ...serializer.json import JSONSerializer - -class BPMNJSONSerializer(BPMNDictionarySerializer, JSONSerializer): - - def _object_hook(self, dct): - if '__form__' in dct: - return Form(init=json.loads(dct['__form__'])) - - return super()._object_hook(dct) - - def _default(self, obj): - if isinstance(obj,Form): - return {'__form__': json.dumps(obj, default=lambda o: - self._jsonableHandler(o))} - - return super()._default(obj) diff --git a/SpiffWorkflow/bpmn/serializer/workflow.py b/SpiffWorkflow/bpmn/serializer/workflow.py index f8e70048d..8449b3004 100644 --- a/SpiffWorkflow/bpmn/serializer/workflow.py +++ b/SpiffWorkflow/bpmn/serializer/workflow.py @@ -138,9 +138,9 @@ class BpmnWorkflowSerializer: dct = json.loads(serialization, cls=self.json_decoder_cls) return dct - def deserialize_json(self, serialization, read_only=False, use_gzip=False): + def deserialize_json(self, serialization, use_gzip=False): dct = self.__get_dict(serialization, use_gzip) - return self.workflow_from_dict(dct, read_only) + return self.workflow_from_dict(dct) def get_version(self, serialization, use_gzip=False): try: @@ -171,11 +171,10 @@ class BpmnWorkflowSerializer: dct['bpmn_messages'] = [self.message_to_dict(msg) for msg in workflow.bpmn_messages] return dct - def workflow_from_dict(self, dct, read_only=False): + def workflow_from_dict(self, dct): """Create a workflow based on a dictionary representation. 
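Since a serialization records the serializer version, callers can gate restoration on it before touching the payload. A small usage sketch; only get_version appears in this hunk, so the VERSION attribute and the migration step are assumptions:

def load_workflow(serializer, dump):
    # Refuse to deserialize dumps written by a different serializer version.
    found = serializer.get_version(dump)
    if found != serializer.VERSION:
        raise ValueError(f'serialization version {found!r} requires migration first')
    return serializer.deserialize_json(dump)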
:param dct: the dictionary representation - :param read_only: optionally disable modifying the workflow Returns: a BPMN Workflow object @@ -195,7 +194,7 @@ class BpmnWorkflowSerializer: subprocess_specs[name] = self.spec_converter.restore(wf_dct) # Create the top-level workflow - workflow = self.wf_class(spec, subprocess_specs, read_only=read_only, deserializing=True) + workflow = self.wf_class(spec, subprocess_specs, deserializing=True) # Restore any unretrieve messages workflow.bpmn_messages = [ self.message_from_dict(msg) for msg in dct.get('bpmn_messages', []) ] @@ -256,7 +255,7 @@ class BpmnWorkflowSerializer: if isinstance(task_spec, SubWorkflowTask) and task_id in top_dct.get('subprocesses', {}): subprocess_spec = top.subprocess_specs[task_spec.spec] - subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process, read_only=top.read_only) + subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process) subprocess_dct = top_dct['subprocesses'].get(task_id, {}) subprocess.data = self.data_converter.restore(subprocess_dct.pop('data')) subprocess.success = subprocess_dct.pop('success') diff --git a/SpiffWorkflow/bpmn/serializer/workflow_spec_converter.py b/SpiffWorkflow/bpmn/serializer/workflow_spec_converter.py index 566fe7c8e..13aae1a9c 100644 --- a/SpiffWorkflow/bpmn/serializer/workflow_spec_converter.py +++ b/SpiffWorkflow/bpmn/serializer/workflow_spec_converter.py @@ -2,7 +2,6 @@ from .bpmn_converters import BpmnWorkflowSpecConverter from ..specs.BpmnProcessSpec import BpmnProcessSpec from ..specs.MultiInstanceTask import MultiInstanceTask, getDynamicMIClass -from ..specs.BpmnSpecMixin import BpmnSpecMixin from ..specs.events.IntermediateEvent import _BoundaryEventParent from ...operators import Attrib, PathAttrib @@ -167,11 +166,6 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter): # Now we have to go back and fix all the circular references to everything for task_spec in spec.task_specs.values(): - if isinstance(task_spec, BpmnSpecMixin): - for flow in task_spec.outgoing_sequence_flows.values(): - flow.target_task_spec = spec.get_task_spec_from_name(flow.target_task_spec) - for flow in task_spec.outgoing_sequence_flows_by_id.values(): - flow.target_task_spec = spec.get_task_spec_from_name(flow.target_task_spec) if isinstance(task_spec, _BoundaryEventParent): task_spec.main_child_task_spec = spec.get_task_spec_from_name(task_spec.main_child_task_spec) task_spec.inputs = [ spec.get_task_spec_from_name(name) for name in task_spec.inputs ] diff --git a/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py b/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py index e076ced22..108370d72 100644 --- a/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py +++ b/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py @@ -57,13 +57,6 @@ class _EndJoin(UnstructuredJoin): super(_EndJoin, self)._on_complete_hook(my_task) my_task.workflow.data.update(my_task.data) - def serialize(self, serializer): - return serializer.serialize_join(self) - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_join(wf_spec, s_state, _EndJoin) - class BpmnDataSpecification: diff --git a/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py b/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py index 584c4247c..444863ffc 100644 --- a/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py +++ b/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py @@ -17,7 +17,6 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA -from ...task import TaskState from ...operators import 
Operator from ...specs.base import TaskSpec @@ -33,27 +32,6 @@ class _BpmnCondition(Operator): return task.workflow.script_engine.evaluate(task, self.args[0]) -class SequenceFlow(object): - - """ - Keeps information relating to a sequence flow - """ - - def __init__(self, id, name, documentation, target_task_spec): - """ - Constructor. - """ - self.id = id - self.name = name.strip() if name else name - self.documentation = documentation - self.target_task_spec = target_task_spec - - def serialize(self): - return {'id':self.id, - 'name':self.name, - 'documentation':self.documentation, - 'target_task_spec':self.target_task_spec.id} - class BpmnSpecMixin(TaskSpec): """ @@ -69,8 +47,6 @@ class BpmnSpecMixin(TaskSpec): (optional). """ super(BpmnSpecMixin, self).__init__(wf_spec, name, **kwargs) - self.outgoing_sequence_flows = {} - self.outgoing_sequence_flows_by_id = {} self.lane = lane self.position = position or {'x': 0, 'y': 0} self.loopTask = False @@ -88,105 +64,13 @@ class BpmnSpecMixin(TaskSpec): """ return self.loopTask - def connect_outgoing(self, taskspec, sequence_flow_id, sequence_flow_name, - documentation): - """ - Connect this task spec to the indicated child. - - :param sequence_flow_id: The ID of the connecting sequenceFlow node. - - :param sequence_flow_name: The name of the connecting sequenceFlow - node. - """ - self.connect(taskspec) - s = SequenceFlow( - sequence_flow_id, sequence_flow_name, documentation, taskspec) - self.outgoing_sequence_flows[taskspec.name] = s - self.outgoing_sequence_flows_by_id[sequence_flow_id] = s - - def connect_outgoing_if(self, condition, taskspec, sequence_flow_id, - sequence_flow_name, documentation): + def connect_outgoing_if(self, condition, taskspec): """ Connect this task spec to the indicated child, if the condition evaluates to true. This should only be called if the task has a connect_if method (e.g. ExclusiveGateway). - - :param sequence_flow_id: The ID of the connecting sequenceFlow node. - - :param sequence_flow_name: The name of the connecting sequenceFlow - node. """ self.connect_if(_BpmnCondition(condition), taskspec) - s = SequenceFlow( - sequence_flow_id, sequence_flow_name, documentation, taskspec) - self.outgoing_sequence_flows[taskspec.name] = s - self.outgoing_sequence_flows_by_id[sequence_flow_id] = s - - def get_outgoing_sequence_flow_by_spec(self, task_spec): - """ - Returns the outgoing SequenceFlow targeting the specified task_spec. - """ - return self.outgoing_sequence_flows[task_spec.name] - - def get_outgoing_sequence_flow_by_id(self, id): - """ - Returns the outgoing SequenceFlow with the specified ID. - """ - return self.outgoing_sequence_flows_by_id[id] - - def has_outgoing_sequence_flow(self, id): - """ - Returns true if the SequenceFlow with the specified ID is leaving this - task. - """ - return id in self.outgoing_sequence_flows_by_id - - def get_outgoing_sequence_names(self): - """ - Returns a list of the names of outgoing sequences. Some may be None. - """ - return sorted([s.name for s in - list(self.outgoing_sequence_flows_by_id.values())]) - - def get_outgoing_sequences(self): - """ - Returns a list of outgoing sequences. Some may be None. - """ - return iter(list(self.outgoing_sequence_flows_by_id.values())) - - # Hooks for Custom BPMN tasks ########## - - def entering_waiting_state(self, my_task): - """ - Called when a task enters the WAITING state. - - A subclass may override this method to do work when this happens. 
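These entering_* callbacks are removed along with the restore bookkeeping that guarded them; a custom task spec now reacts to state changes by overriding the surviving _on_*_hook methods instead. A hedged sketch, with the class name and side effect purely illustrative:

from SpiffWorkflow.bpmn.specs.BpmnSpecMixin import BpmnSpecMixin

class AuditedTask(BpmnSpecMixin):
    # React when a task reaches READY, replacing entering_ready_state().
    def _on_ready_hook(self, my_task):
        super()._on_ready_hook(my_task)
        print(f'task {my_task.task_spec.name} is now READY')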
- """ - pass - - def entering_ready_state(self, my_task): - """ - Called when a task enters the READY state. - - A subclass may override this method to do work when this happens. - """ - pass - - def entering_complete_state(self, my_task): - """ - Called when a task enters the COMPLETE state. - - A subclass may override this method to do work when this happens. - """ - pass - - def entering_cancelled_state(self, my_task): - """ - Called when a task enters the CANCELLED state. - - A subclass may override this method to do work when this happens. - """ - pass def _on_ready_hook(self, my_task): super()._on_ready_hook(my_task) @@ -205,26 +89,6 @@ class BpmnSpecMixin(TaskSpec): super(BpmnSpecMixin, self)._on_complete_hook(my_task) if isinstance(my_task.parent.task_spec, BpmnSpecMixin): my_task.parent.task_spec._child_complete_hook(my_task) - if not my_task.workflow._is_busy_with_restore(): - self.entering_complete_state(my_task) def _child_complete_hook(self, child_task): pass - - def _on_cancel(self, my_task): - super(BpmnSpecMixin, self)._on_cancel(my_task) - my_task.workflow._task_cancelled_notify(my_task) - if not my_task.workflow._is_busy_with_restore(): - self.entering_cancelled_state(my_task) - - def _update_hook(self, my_task): - prev_state = my_task.state - super(BpmnSpecMixin, self)._update_hook(my_task) - if (prev_state != TaskState.WAITING and my_task.state == TaskState.WAITING and - not my_task.workflow._is_busy_with_restore()): - self.entering_waiting_state(my_task) - - def _on_ready_before_hook(self, my_task): - super(BpmnSpecMixin, self)._on_ready_before_hook(my_task) - if not my_task.workflow._is_busy_with_restore(): - self.entering_ready_state(my_task) diff --git a/SpiffWorkflow/bpmn/specs/ExclusiveGateway.py b/SpiffWorkflow/bpmn/specs/ExclusiveGateway.py index 89a1445ff..0dc4500d6 100644 --- a/SpiffWorkflow/bpmn/specs/ExclusiveGateway.py +++ b/SpiffWorkflow/bpmn/specs/ExclusiveGateway.py @@ -41,22 +41,14 @@ class ExclusiveGateway(ExclusiveChoice, BpmnSpecMixin): # raise WorkflowException(self, 'At least one output required.') for condition, name in self.cond_task_specs: if name is None: - raise WorkflowException(self, 'Condition with no task spec.') + raise WorkflowException('Condition with no task spec.', task_spec=self) task_spec = self._wf_spec.get_task_spec_from_name(name) if task_spec is None: msg = 'Condition leads to non-existent task ' + repr(name) - raise WorkflowException(self, msg) + raise WorkflowException(msg, task_spec=self) if condition is None: continue @property def spec_type(self): return 'Exclusive Gateway' - - def serialize(self, serializer): - return serializer.serialize_exclusive_gateway(self) - - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_exclusive_gateway(wf_spec, s_state) diff --git a/SpiffWorkflow/bpmn/specs/InclusiveGateway.py b/SpiffWorkflow/bpmn/specs/InclusiveGateway.py index 05eaffd9d..21c1cfe13 100644 --- a/SpiffWorkflow/bpmn/specs/InclusiveGateway.py +++ b/SpiffWorkflow/bpmn/specs/InclusiveGateway.py @@ -117,7 +117,3 @@ class InclusiveGateway(UnstructuredJoin): done.add(child) q.append(child) return False - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_generic(wf_spec, s_state, InclusiveGateway) diff --git a/SpiffWorkflow/bpmn/specs/ManualTask.py b/SpiffWorkflow/bpmn/specs/ManualTask.py index c8d4e7c25..6e4c7ecaf 100644 --- a/SpiffWorkflow/bpmn/specs/ManualTask.py +++ b/SpiffWorkflow/bpmn/specs/ManualTask.py @@ -23,10 +23,6 @@ 
from ...specs.Simple import Simple class ManualTask(Simple, BpmnSpecMixin): - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_generic(wf_spec, s_state, ManualTask) - def is_engine_task(self): return False diff --git a/SpiffWorkflow/bpmn/specs/MultiInstanceTask.py b/SpiffWorkflow/bpmn/specs/MultiInstanceTask.py index 6a81c26fd..b56b0d1de 100644 --- a/SpiffWorkflow/bpmn/specs/MultiInstanceTask.py +++ b/SpiffWorkflow/bpmn/specs/MultiInstanceTask.py @@ -22,12 +22,12 @@ from builtins import range from uuid import uuid4 import re -from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException from .SubWorkflowTask import SubWorkflowTask, CallActivity from .ParallelGateway import ParallelGateway from .ScriptTask import ScriptTask from .ExclusiveGateway import ExclusiveGateway from ...dmn.specs.BusinessRuleTask import BusinessRuleTask +from ...exceptions import WorkflowTaskException from ...operators import valueof, is_number from ...specs.SubWorkflow import SubWorkflow from ...specs.base import TaskSpec @@ -396,9 +396,10 @@ class MultiInstanceTask(TaskSpec): # look for variable in context, if we don't find it, default to 1 variable = valueof(my_task, self.times, 1) if self.times.name == self.collection.name and type(variable) == type([]): - raise WorkflowTaskExecException(my_task, - 'If we are updating a collection, then the collection must be a dictionary.') - + raise WorkflowTaskException( + 'If we are updating a collection, then the collection must be a dictionary.', + task=my_task) + def _get_current_var(self, my_task, pos): variable = valueof(my_task, self.times, 1) if is_number(variable): @@ -418,7 +419,7 @@ class MultiInstanceTask(TaskSpec): msg = f"There is a mismatch between runtimes and the number " \ f"items in the collection, please check for empty " \ f"collection {self.collection.name}." 
- raise WorkflowTaskExecException(my_task, msg) + raise WorkflowTaskException(msg, task=my_task) runtimesvar = keys[runtimes - 1] else: # Use an integer (for arrays) @@ -477,18 +478,6 @@ class MultiInstanceTask(TaskSpec): if not isinstance(my_task.task_spec,SubWorkflowTask): my_task._sync_children(outputs, TaskState.FUTURE) - def serialize(self, serializer): - - return serializer.serialize_multi_instance(self) - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - prevclass = get_class(s_state['prevtaskclass']) - spec = getDynamicMIClass(s_state['name'], prevclass)(wf_spec,s_state['name'],s_state['times']) - spec.prevtaskclass = s_state['prevtaskclass'] - - return serializer.deserialize_multi_instance(wf_spec, s_state, spec) - def getDynamicMIClass(id,prevclass): id = re.sub('(.+)_[0-9]$','\\1',id) diff --git a/SpiffWorkflow/bpmn/specs/NoneTask.py b/SpiffWorkflow/bpmn/specs/NoneTask.py index 5c844e90f..7d14a9b98 100644 --- a/SpiffWorkflow/bpmn/specs/NoneTask.py +++ b/SpiffWorkflow/bpmn/specs/NoneTask.py @@ -29,7 +29,3 @@ class NoneTask(Simple, BpmnSpecMixin): @property def spec_type(self): return 'Task' - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_generic(wf_spec, s_state, NoneTask) diff --git a/SpiffWorkflow/bpmn/specs/ParallelGateway.py b/SpiffWorkflow/bpmn/specs/ParallelGateway.py index a05c6ec07..3ade2ecc3 100644 --- a/SpiffWorkflow/bpmn/specs/ParallelGateway.py +++ b/SpiffWorkflow/bpmn/specs/ParallelGateway.py @@ -50,7 +50,3 @@ class ParallelGateway(UnstructuredJoin): @property def spec_type(self): return 'Parallel Gateway' - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_generic(wf_spec, s_state, ParallelGateway) diff --git a/SpiffWorkflow/bpmn/specs/ScriptTask.py b/SpiffWorkflow/bpmn/specs/ScriptTask.py index 0a83b00b1..310ea76fa 100644 --- a/SpiffWorkflow/bpmn/specs/ScriptTask.py +++ b/SpiffWorkflow/bpmn/specs/ScriptTask.py @@ -37,13 +37,6 @@ class ScriptEngineTask(Simple, BpmnSpecMixin): task._set_state(TaskState.WAITING) raise exc - def serialize(self, serializer): - return serializer.serialize_script_task(self) - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_script_task(wf_spec, s_state) - class ScriptTask(ScriptEngineTask): diff --git a/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py b/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py index 978ef7f55..4232ec078 100644 --- a/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py +++ b/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py @@ -90,13 +90,6 @@ class SubWorkflowTask(BpmnSpecMixin): my_task._set_state(TaskState.WAITING) - def serialize(self, serializer): - return serializer.serialize_subworkflow_task(self) - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_subworkflow_task(wf_spec, s_state, SubWorkflowTask) - def task_will_set_children_future(self, my_task): my_task.workflow.delete_subprocess(my_task) @@ -110,9 +103,6 @@ class CallActivity(SubWorkflowTask): def spec_type(self): return 'Call Activity' - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_subworkflow_task(wf_spec, s_state, CallActivity) class TransactionSubprocess(SubWorkflowTask): @@ -123,6 +113,3 @@ class TransactionSubprocess(SubWorkflowTask): def spec_type(self): return 'Transactional Subprocess' - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return 
serializer.deserialize_subworkflow_task(wf_spec, s_state, TransactionSubprocess) diff --git a/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py b/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py index d2e45be31..ba739215d 100644 --- a/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py +++ b/SpiffWorkflow/bpmn/specs/UnstructuredJoin.py @@ -56,9 +56,8 @@ class UnstructuredJoin(Join, BpmnSpecMixin): task._has_state(TaskState.WAITING) or task == my_task): if task.parent.task_spec in completed_inputs: raise(WorkflowException - (task.task_spec, - "Unsupported looping behaviour: two threads waiting" - " on the same sequence flow.")) + ("Unsupported looping behaviour: two threads waiting" + " on the same sequence flow.", task_spec=self)) completed_inputs.add(task.parent.task_spec) else: waiting_tasks.append(task.parent) diff --git a/SpiffWorkflow/bpmn/specs/events/EndEvent.py b/SpiffWorkflow/bpmn/specs/events/EndEvent.py index 38dadc5a3..8f5bf665c 100644 --- a/SpiffWorkflow/bpmn/specs/events/EndEvent.py +++ b/SpiffWorkflow/bpmn/specs/events/EndEvent.py @@ -62,10 +62,3 @@ class EndEvent(ThrowingEvent): elif isinstance(self.event_definition, CancelEventDefinition): my_task.workflow.cancel() - - def serialize(self, serializer): - return serializer.serialize_generic_event(self) - - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_generic_event(wf_spec, s_state, EndEvent) diff --git a/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py b/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py index 798c04c6d..fd3c1560c 100644 --- a/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py +++ b/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py @@ -104,12 +104,6 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin): if child.task_spec == self.main_child_task_spec: child._set_state(state) - def serialize(self, serializer): - return serializer.serialize_boundary_event_parent(self) - - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_boundary_event_parent(wf_spec, s_state, cls) class BoundaryEvent(CatchingEvent): @@ -142,13 +136,6 @@ class BoundaryEvent(CatchingEvent): # Notify the boundary event parent as well. 
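        # (Assumption for context: _BoundaryEventParent uses this notification to
        # cancel the main activity when the boundary event is interrupting, i.e.
        # when cancel_activity is True, and to leave it running otherwise.)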
my_task.parent.task_spec._child_complete_hook(my_task) - def serialize(self, serializer): - return serializer.serialize_boundary_event(self) - - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_boundary_event(wf_spec, s_state, cls) - class EventBasedGateway(CatchingEvent): diff --git a/SpiffWorkflow/bpmn/specs/events/StartEvent.py b/SpiffWorkflow/bpmn/specs/events/StartEvent.py index 3513894ce..abd0973ee 100644 --- a/SpiffWorkflow/bpmn/specs/events/StartEvent.py +++ b/SpiffWorkflow/bpmn/specs/events/StartEvent.py @@ -40,11 +40,3 @@ class StartEvent(CatchingEvent): my_task._set_state(TaskState.WAITING) super(StartEvent, self).catch(my_task, event_definition) - - def serialize(self, serializer): - return serializer.serialize_generic_event(self) - - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_generic_event(wf_spec, s_state, StartEvent) - diff --git a/SpiffWorkflow/bpmn/specs/events/event_definitions.py b/SpiffWorkflow/bpmn/specs/events/event_definitions.py index fc1cb2f06..3a73690ec 100644 --- a/SpiffWorkflow/bpmn/specs/events/event_definitions.py +++ b/SpiffWorkflow/bpmn/specs/events/event_definitions.py @@ -78,21 +78,6 @@ class EventDefinition(object): def __eq__(self, other): return self.__class__.__name__ == other.__class__.__name__ - def serialize(self): - return { - 'classname': self.__class__.__module__ + '.' + self.__class__.__name__, - 'internal': self.internal, - 'external': self.external, - } - - @classmethod - def deserialize(cls, dct): - dct.pop('classname') - internal, external = dct.pop('internal'), dct.pop('external') - obj = cls(**dct) - obj.internal, obj.external = internal, external - return obj - class NamedEventDefinition(EventDefinition): """ @@ -112,10 +97,6 @@ class NamedEventDefinition(EventDefinition): def __eq__(self, other): return self.__class__.__name__ == other.__class__.__name__ and self.name == other.name - def serialize(self): - retdict = super(NamedEventDefinition, self).serialize() - retdict['name'] = self.name - return retdict class CancelEventDefinition(EventDefinition): """ @@ -149,10 +130,6 @@ class ErrorEventDefinition(NamedEventDefinition): def __eq__(self, other): return self.__class__.__name__ == other.__class__.__name__ and self.error_code in [ None, other.error_code ] - def serialize(self): - retdict = super(ErrorEventDefinition, self).serialize() - retdict['error_code'] = self.error_code - return retdict class EscalationEventDefinition(NamedEventDefinition): """ @@ -177,11 +154,6 @@ class EscalationEventDefinition(NamedEventDefinition): def __eq__(self, other): return self.__class__.__name__ == other.__class__.__name__ and self.escalation_code in [ None, other.escalation_code ] - def serialize(self): - retdict = super(EscalationEventDefinition, self).serialize() - retdict['escalation_code'] = self.escalation_code - return retdict - class CorrelationProperty: """Rules for generating a correlation key when a message is sent or received.""" @@ -339,12 +311,6 @@ class TimerEventDefinition(EventDefinition): def __eq__(self, other): return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label - def serialize(self): - retdict = super(TimerEventDefinition, self).serialize() - retdict['label'] = self.label - retdict['dateTime'] = self.dateTime - return retdict - class CycleTimerEventDefinition(EventDefinition): """ @@ -409,12 +375,6 @@ class CycleTimerEventDefinition(EventDefinition): def __eq__(self, other): return 
self.__class__.__name__ == other.__class__.__name__ and self.label == other.label - def serialize(self): - retdict = super(CycleTimerEventDefinition, self).serialize() - retdict['label'] = self.label - retdict['cycle_definition'] = self.cycle_definition - return retdict - class MultipleEventDefinition(EventDefinition): diff --git a/SpiffWorkflow/bpmn/specs/events/event_types.py b/SpiffWorkflow/bpmn/specs/events/event_types.py index f2aa6d21d..70739593b 100644 --- a/SpiffWorkflow/bpmn/specs/events/event_types.py +++ b/SpiffWorkflow/bpmn/specs/events/event_types.py @@ -72,13 +72,6 @@ class CatchingEvent(Simple, BpmnSpecMixin): self.event_definition.reset(my_task) super(CatchingEvent, self)._on_complete_hook(my_task) - def serialize(self, serializer): - return serializer.serialize_generic_event(self) - - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_generic_event(wf_spec, s_state, cls) - class ThrowingEvent(Simple, BpmnSpecMixin): """Base Task Spec for Throwing Event nodes.""" @@ -95,10 +88,3 @@ class ThrowingEvent(Simple, BpmnSpecMixin): def _on_complete_hook(self, my_task): super(ThrowingEvent, self)._on_complete_hook(my_task) self.event_definition.throw(my_task) - - def serialize(self, serializer): - return serializer.serialize_generic_event(self) - - @classmethod - def deserialize(cls, serializer, wf_spec, s_state): - return serializer.deserialize_generic_event(wf_spec, s_state, cls) diff --git a/SpiffWorkflow/bpmn/workflow.py b/SpiffWorkflow/bpmn/workflow.py index 0f556e31d..6cd18dbac 100644 --- a/SpiffWorkflow/bpmn/workflow.py +++ b/SpiffWorkflow/bpmn/workflow.py @@ -41,22 +41,14 @@ class BpmnWorkflow(Workflow): Spiff Workflow class with a few extra methods and attributes. """ - def __init__(self, top_level_spec, subprocess_specs=None, name=None, script_engine=None, - read_only=False, **kwargs): + def __init__(self, top_level_spec, subprocess_specs=None, name=None, script_engine=None, **kwargs): """ Constructor. :param script_engine: set to an extension of PythonScriptEngine if you need a specialised version. Defaults to the script engine of the top most workflow, or to the PythonScriptEngine if none is provided. - - :param read_only: If this parameter is set then the workflow state - cannot change. It can only be queried to find out about the current - state. This is used in conjunction with the CompactWorkflowSerializer - to provide read only access to a previously saved workflow. 
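With read_only and the CompactWorkflowSerializer gone, construction narrows to a spec, optional subprocess specs, and an optional script engine. A usage sketch under the new signature; the parser output variables are stand-ins:

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

def make_workflow(top_level_spec, subprocess_specs):
    # read_only is no longer accepted; inspecting state needs no special mode
    return BpmnWorkflow(top_level_spec, subprocess_specs,
                        script_engine=PythonScriptEngine())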
""" - self._busy_with_restore = False - # THIS IS THE LINE THAT LOGS super(BpmnWorkflow, self).__init__(top_level_spec, **kwargs) self.name = name or top_level_spec.name self.subprocess_specs = subprocess_specs or {} @@ -64,7 +56,6 @@ class BpmnWorkflow(Workflow): self.bpmn_messages = [] self.correlations = {} self.__script_engine = script_engine or PythonScriptEngine() - self.read_only = read_only @property def script_engine(self): @@ -82,7 +73,6 @@ class BpmnWorkflow(Workflow): workflow = self._get_outermost_workflow(my_task) subprocess = BpmnWorkflow( workflow.subprocess_specs[spec_name], name=name, - read_only=self.read_only, script_engine=self.script_engine, parent=my_task.workflow) workflow.subprocesses[my_task.id] = subprocess @@ -134,8 +124,6 @@ class BpmnWorkflow(Workflow): :param event_definition: the thrown event """ - assert not self.read_only and not self._is_busy_with_restore() - # Start a subprocess for known specs with start events that catch this # This is total hypocritical of me given how I've argued that specs should # be immutable, but I see no other way of doing this. @@ -180,7 +168,6 @@ class BpmnWorkflow(Workflow): :param will_complete_task: Callback that will be called prior to completing a task :param did_complete_task: Callback that will be called after completing a task """ - assert not self.read_only engine_steps = list( [t for t in self.get_tasks(TaskState.READY) if self._is_engine_task(t.task_spec)]) @@ -207,7 +194,6 @@ class BpmnWorkflow(Workflow): :param will_refresh_task: Callback that will be called prior to refreshing a task :param did_refresh_task: Callback that will be called after refreshing a task """ - assert not self.read_only for my_task in self.get_tasks(TaskState.WAITING): if will_refresh_task is not None: will_refresh_task(my_task) @@ -232,12 +218,11 @@ class BpmnWorkflow(Workflow): def _find_task(self, task_id): if task_id is None: - raise WorkflowException(self.spec, 'task_id is None') + raise WorkflowException('task_id is None', task_spec=self.spec) for task in self.get_tasks(): if task.id == task_id: return task - raise WorkflowException(self.spec, - f'A task with the given task_id ({task_id}) was not found') + raise WorkflowException(f'A task with the given task_id ({task_id}) was not found', task_spec=self.spec) def complete_task_from_id(self, task_id): # I don't even know why we use this stupid function instead of calling task.complete, @@ -252,9 +237,7 @@ class BpmnWorkflow(Workflow): return task.reset_token(data) def get_ready_user_tasks(self,lane=None): - """ - Returns a list of User Tasks that are READY for user action - """ + """Returns a list of User Tasks that are READY for user action""" if lane is not None: return [t for t in self.get_tasks(TaskState.READY) if (not self._is_engine_task(t.task_spec)) @@ -264,26 +247,14 @@ class BpmnWorkflow(Workflow): if not self._is_engine_task(t.task_spec)] def get_waiting_tasks(self): - """ - Returns a list of all WAITING tasks - """ + """Returns a list of all WAITING tasks""" return self.get_tasks(TaskState.WAITING) def get_catching_tasks(self): return [ task for task in self.get_tasks() if isinstance(task.task_spec, CatchingEvent) ] - def _is_busy_with_restore(self): - if self.outer_workflow == self: - return self._busy_with_restore - return self.outer_workflow._is_busy_with_restore() - def _is_engine_task(self, task_spec): - return (not hasattr(task_spec, 'is_engine_task') or - task_spec.is_engine_task()) + return (not hasattr(task_spec, 'is_engine_task') or task_spec.is_engine_task()) def 
_task_completed_notify(self, task): - assert (not self.read_only) or self._is_busy_with_restore() super(BpmnWorkflow, self)._task_completed_notify(task) - - def _task_cancelled_notify(self, task): - assert (not self.read_only) or self._is_busy_with_restore() diff --git a/SpiffWorkflow/camunda/parser/task_spec.py b/SpiffWorkflow/camunda/parser/task_spec.py index c62f464e9..df31984c1 100644 --- a/SpiffWorkflow/camunda/parser/task_spec.py +++ b/SpiffWorkflow/camunda/parser/task_spec.py @@ -44,7 +44,7 @@ class UserTaskParser(TaskParser): """ Base class for parsing User Tasks """ - + def __init__(self, process_parser, spec_class, node, lane=None): nsmap = DEFAULT_NSMAP.copy() nsmap.update({'camunda': CAMUNDA_MODEL_NS}) @@ -63,7 +63,7 @@ class UserTaskParser(TaskParser): form = Form() try: form.key = self.node.attrib['{' + CAMUNDA_MODEL_NS + '}formKey'] - except (KeyError): + except KeyError: return form for xml_field in self.xpath('.//camunda:formData/camunda:formField'): if xml_field.get('type') == 'enum': diff --git a/SpiffWorkflow/camunda/specs/UserTask.py b/SpiffWorkflow/camunda/specs/UserTask.py index 803605ae5..6a952cf2a 100644 --- a/SpiffWorkflow/camunda/specs/UserTask.py +++ b/SpiffWorkflow/camunda/specs/UserTask.py @@ -6,6 +6,7 @@ from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin class UserTask(UserTask, BpmnSpecMixin): + """Task Spec for a bpmn:userTask node with Camunda forms.""" def __init__(self, wf_spec, name, form, **kwargs): """ @@ -16,24 +17,12 @@ class UserTask(UserTask, BpmnSpecMixin): super(UserTask, self).__init__(wf_spec, name, **kwargs) self.form = form - - """ - Task Spec for a bpmn:userTask node. - """ - def _on_trigger(self, my_task): pass def is_engine_task(self): return False - def serialize(self, serializer): - return serializer.serialize_user_task(self) - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_user_task(wf_spec, s_state) - class FormField(object): def __init__(self, form_type="text"): @@ -66,8 +55,6 @@ class FormField(object): def has_validation(self, name): return self.get_validation(name) is not None - def jsonable(self): - return self.__dict__ class EnumFormField(FormField): def __init__(self): @@ -77,17 +64,12 @@ class EnumFormField(FormField): def add_option(self, option_id, name): self.options.append(EnumFormFieldOption(option_id, name)) - def jsonable(self): - return self.__dict__ - class EnumFormFieldOption: def __init__(self, option_id, name): self.id = option_id self.name = name - def jsonable(self): - return self.__dict__ class FormFieldProperty: @@ -95,18 +77,12 @@ class FormFieldProperty: self.id = property_id self.value = value - def jsonable(self): - return self.__dict__ - class FormFieldValidation: def __init__(self, name, config): self.name = name self.config = config - def jsonable(self): - return self.__dict__ - class Form: def __init__(self,init=None): @@ -118,9 +94,6 @@ class Form: def add_field(self, field): self.fields.append(field) - def jsonable(self): - return self.__dict__ - def from_dict(self,formdict): self.key = formdict['key'] for field in formdict['fields']: diff --git a/SpiffWorkflow/camunda/specs/events/event_definitions.py b/SpiffWorkflow/camunda/specs/events/event_definitions.py index c9f5bfc55..e98d29707 100644 --- a/SpiffWorkflow/camunda/specs/events/event_definitions.py +++ b/SpiffWorkflow/camunda/specs/events/event_definitions.py @@ -46,9 +46,3 @@ class MessageEventDefinition(MessageEventDefinition): def reset(self, my_task): 
my_task.internal_data.pop('result_var', None) super(MessageEventDefinition, self).reset(my_task) - - def serialize(self): - retdict = super().serialize() - retdict['payload'] = self.payload - retdict['result_var'] = self.result_var - return retdict \ No newline at end of file diff --git a/SpiffWorkflow/dmn/engine/DMNEngine.py b/SpiffWorkflow/dmn/engine/DMNEngine.py index 0d63ac524..dcf7880cc 100644 --- a/SpiffWorkflow/dmn/engine/DMNEngine.py +++ b/SpiffWorkflow/dmn/engine/DMNEngine.py @@ -2,6 +2,7 @@ import logging import re from ..specs.model import HitPolicy +from ...exceptions import SpiffWorkflowException, WorkflowTaskException from ...util import levenshtein from ...workflow import WorkflowException @@ -54,18 +55,13 @@ class DMNEngine: try: if not self.evaluate(input_val, lhs, task): return False - except NameError as e: - # Add a bit of info, re-raise as Name Error - raise NameError(str(e) + "Failed to execute " - "expression: '%s' is '%s' in the " - "Row with annotation '%s'") - except WorkflowException as we: - raise we + except SpiffWorkflowException as se: + se.add_note(f"Rule failed on row {rule.row_number}") + raise se except Exception as e: - raise Exception("Failed to execute " - "expression: '%s' is '%s' in the " - "Row with annotation '%s', %s" % ( - input_val, lhs, rule.description, str(e))) + error = WorkflowTaskException(str(e), task=task, exception=e) + error.add_note(f"Failed to execute DMN Rule on row {rule.row_number}") + raise error else: # Empty means ignore decision value continue # Check the other operators/columns @@ -111,10 +107,7 @@ class DMNEngine: external_methods=external_methods) # The input expression just has to be something that can be parsed as is by the engine. - try: - script_engine.validate(input_expr) - except Exception as e: - raise WorkflowException(f"Input Expression '{input_expr}' is malformed. 
" + str(e)) + script_engine.validate(input_expr) # If we get here, we need to check whether the match expression includes # an operator or if can use '==' diff --git a/SpiffWorkflow/dmn/parser/BpmnDmnParser.py b/SpiffWorkflow/dmn/parser/BpmnDmnParser.py index 279023f8f..c1b9799f8 100644 --- a/SpiffWorkflow/dmn/parser/BpmnDmnParser.py +++ b/SpiffWorkflow/dmn/parser/BpmnDmnParser.py @@ -32,7 +32,7 @@ class BpmnDmnParser(BpmnParser): options = ', '.join(list(self.dmn_parsers.keys())) raise ValidationException( 'No DMN Diagram available with id "%s", Available DMN ids are: %s' %(decision_ref, options), - node=node, filename='') + node=node, file_name='') dmn_parser = self.dmn_parsers[decision_ref] dmn_parser.parse() decision = dmn_parser.decision diff --git a/SpiffWorkflow/dmn/parser/DMNParser.py b/SpiffWorkflow/dmn/parser/DMNParser.py index 02d7ae568..d08cef53b 100644 --- a/SpiffWorkflow/dmn/parser/DMNParser.py +++ b/SpiffWorkflow/dmn/parser/DMNParser.py @@ -1,6 +1,7 @@ import ast from SpiffWorkflow.bpmn.parser.node_parser import NodeParser, DEFAULT_NSMAP +from ...bpmn.parser.ValidationException import ValidationException from ...bpmn.parser.util import xpath_eval @@ -69,25 +70,20 @@ class DMNParser(NodeParser): def _parse_decision(self, root): decision_elements = list(root) if len(decision_elements) == 0: - raise Exception('No decisions found') + raise ValidationException('No decisions found', file_name=self.filename, + node=root) if len(decision_elements) > 1: - raise Exception('Multiple decisions found') + raise ValidationException('Multiple decision tables are not current supported.', + file_name=self.filename, node=root) decision_element = decision_elements[0] - assert decision_element.tag.endswith( - 'decision'), 'Element %r is not of type "decision"' % ( - decision_element.tag) decision = Decision(decision_element.attrib['id'], decision_element.attrib.get('name', '')) # Parse decision tables - try: - self._parse_decision_tables(decision, decision_element) - except Exception as e: - raise Exception( - "Error in Decision '%s': %s" % (decision.name, str(e))) + self._parse_decision_tables(decision, decision_element) return decision @@ -104,6 +100,7 @@ class DMNParser(NodeParser): def _parse_inputs_outputs(self, decisionTable, decisionTableElement): + rule_counter = 0 for element in decisionTableElement: if element.tag.endswith('input'): e_input = self._parse_input(element) @@ -112,11 +109,13 @@ class DMNParser(NodeParser): output = self._parse_output(element) decisionTable.outputs.append(output) elif element.tag.endswith('rule'): - rule = self._parse_rule(decisionTable, element) + rule_counter += 1 + rule = self._parse_rule(decisionTable, element, rule_counter) decisionTable.rules.append(rule) else: - raise Exception( - 'Unknown type in decision table: %r' % element.tag) + raise ValidationException( + 'Unknown type in decision table: %r' % element.tag, + node=element, file_name=self.filename) def _parse_input(self, input_element): type_ref = None @@ -142,9 +141,9 @@ class DMNParser(NodeParser): outputElement.attrib.get('typeRef', '')) return output - def _parse_rule(self, decisionTable, ruleElement): + def _parse_rule(self, decisionTable, ruleElement, rowNumber): rule = Rule(ruleElement.attrib['id']) - + rule.row_number = rowNumber input_idx = 0 output_idx = 0 for child in ruleElement: @@ -189,6 +188,7 @@ class DMNParser(NodeParser): try: ast.parse(entry.text) except Exception as e: - raise Exception( - "Malformed Output Expression '%s'. 
%s " % (entry.text, str(e))) + raise ValidationException( + "Malformed Output Expression '%s'. %s " % (entry.text, str(e)), + node=element, file_name=self.filename) return entry diff --git a/SpiffWorkflow/dmn/serializer/task_spec_converters.py b/SpiffWorkflow/dmn/serializer/task_spec_converters.py index 7c65ab5f4..9e78c418f 100644 --- a/SpiffWorkflow/dmn/serializer/task_spec_converters.py +++ b/SpiffWorkflow/dmn/serializer/task_spec_converters.py @@ -47,6 +47,7 @@ class BusinessRuleTaskConverter(BpmnTaskSpecConverter): def rule_to_dict(self, rule): return { 'id': rule.id, + 'row_number': rule.row_number, 'description': rule.description, 'input_entries': [self.input_entry_to_dict(entry) for entry in rule.inputEntries], 'output_entries': [self.output_entry_to_dict(entry) for entry in rule.outputEntries], @@ -91,6 +92,7 @@ class BusinessRuleTaskConverter(BpmnTaskSpecConverter): def rule_from_dict(self, dct, inputs, outputs): rule = Rule(dct['id']) rule.description = dct['description'] + rule.row_number = dct.get('row_number', 0) rule.inputEntries = [self.input_entry_from_dict(entry, inputs) for entry in dct['input_entries']] rule.outputEntries = [self.output_entry_from_dict(entry, outputs) diff --git a/SpiffWorkflow/dmn/specs/BusinessRuleTask.py b/SpiffWorkflow/dmn/specs/BusinessRuleTask.py index 8486def2b..020c03915 100644 --- a/SpiffWorkflow/dmn/specs/BusinessRuleTask.py +++ b/SpiffWorkflow/dmn/specs/BusinessRuleTask.py @@ -1,4 +1,5 @@ -from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException +from SpiffWorkflow.exceptions import WorkflowTaskException, WorkflowException, \ + SpiffWorkflowException from ...specs.Simple import Simple @@ -29,14 +30,11 @@ class BusinessRuleTask(Simple, BpmnSpecMixin): my_task.data = DeepMerge.merge(my_task.data, self.dmnEngine.result(my_task)) super(BusinessRuleTask, self)._on_complete_hook(my_task) + except SpiffWorkflowException as we: + we.add_note(f"Business Rule Task '{my_task.task_spec.description}'.") + raise we except Exception as e: - raise WorkflowTaskExecException(my_task, str(e)) - - def serialize(self, serializer): - return serializer.serialize_business_rule_task(self) - - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_business_rule_task(wf_spec, s_state) - + error = WorkflowTaskException(str(e), task=my_task) + error.add_note(f"Business Rule Task '{my_task.task_spec.description}'.") + raise error diff --git a/SpiffWorkflow/dmn/specs/model.py b/SpiffWorkflow/dmn/specs/model.py index a4d847b5b..bf3322d18 100644 --- a/SpiffWorkflow/dmn/specs/model.py +++ b/SpiffWorkflow/dmn/specs/model.py @@ -36,31 +36,6 @@ class DecisionTable: self.outputs = [] self.rules = [] - def serialize(self): - out = {} - out['id'] = self.id - out['name'] = self.name - out['hit_policy'] = self.hit_policy - out['inputs'] = [x.serialize() for x in self.inputs] - out['outputs'] = [x.serialize() for x in self.outputs] - out['rules'] = [x.serialize() for x in self.rules] - return out - - def deserialize(self,indict): - self.id = indict['id'] - self.name = indict['name'] - if 'hit_policy' in indict: - self.hit_policy = indict['hit_policy'] - else: - self.hit_policy = HitPolicy.UNIQUE.value - self.inputs = [Input(**x) for x in indict['inputs']] - list(map(lambda x, y: x.deserialize(y), self.inputs, indict['inputs'])) - self.outputs = [Output(**x) for x in indict['outputs']] - self.rules = [Rule(None) for x in indict['rules']] - list(map(lambda x, y: x.deserialize(y),self.rules,indict['rules'])) - - - class Input: def 
__init__(self, id, label, name, expression, typeRef): @@ -70,20 +45,6 @@ class Input: self.expression = expression self.typeRef = typeRef - def serialize(self): - out = {} - out['id'] = self.id - out['label'] = self.label - out['name'] = self.name - out['expression'] = self.expression - out['typeRef'] = self.typeRef - return out - - def deserialize(self,indict): - pass - - - class InputEntry: def __init__(self, id, input): @@ -93,20 +54,6 @@ class InputEntry: self.description = '' self.lhs = [] - def serialize(self): - out = {} - out['id'] = self.id - out['input'] = self.input.serialize() - out['description'] = self.description - out['lhs'] = self.lhs - return out - - def deserialize(self, indict): - self.id = indict['id'] - self.description = indict['description'] - self.lhs = indict['lhs'] - self.input = Input(**indict['input']) - self.input.deserialize(indict['input']) class Output: def __init__(self, id, label, name, typeRef): @@ -115,14 +62,6 @@ class Output: self.name = name self.typeRef = typeRef - def serialize(self): - out = {} - out['id'] = self.id - out['label'] = self.label - out['name'] = self.name - out['typeRef'] = self.typeRef - return out - class OutputEntry: def __init__(self, id, output): @@ -132,45 +71,15 @@ class OutputEntry: self.description = '' self.text = '' - def serialize(self): - out = {} - out['id'] = self.id - out['output'] = self.output.serialize() - out['description'] = self.description - out['text'] = self.text - return out - - def deserialize(self, indict): - self.id = indict['id'] - self.description = indict['description'] - self.text = indict['text'] - self.output = Output(**indict['output']) - - class Rule: def __init__(self, id): self.id = id - + self.row_number = 0 self.description = '' self.inputEntries = [] self.outputEntries = [] - def serialize(self): - out = {} - out['id'] = self.id - out['description'] = self.description - out['inputEntries'] = [x.serialize() for x in self.inputEntries] - out['outputEntries'] = [x.serialize() for x in self.outputEntries] - return out - - def deserialize(self,indict): - self.id = indict['id'] - self.description = indict['description'] - self.inputEntries = [InputEntry(None,None) for x in indict['inputEntries']] - list(map(lambda x,y : x.deserialize(y), self.inputEntries, indict['inputEntries'])) - self.outputEntries = [OutputEntry(None, None) for x in indict['outputEntries']] - list(map(lambda x, y: x.deserialize(y), self.outputEntries, indict['outputEntries'])) def output_as_dict(self, task): script_engine = task.workflow.script_engine diff --git a/SpiffWorkflow/exceptions.py b/SpiffWorkflow/exceptions.py index 2100fc163..85e4fb366 100644 --- a/SpiffWorkflow/exceptions.py +++ b/SpiffWorkflow/exceptions.py @@ -15,25 +15,45 @@ # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA +import re + +from SpiffWorkflow.util import levenshtein -class WorkflowException(Exception): +class SpiffWorkflowException(Exception): + """ + Base class for all SpiffWorkflow-generated exceptions. + """ + def __init__(self, msg): + super().__init__(msg) + self.notes = [] + + def add_note(self, note): + """add_note is a python 3.11 feature, this can be removed when we + stop supporting versions prior to 3.11""" + self.notes.append(note) + + def __str__(self): + return super().__str__() + ". " + ". ".join(self.notes) + + +class WorkflowException(SpiffWorkflowException): """ Base class for all SpiffWorkflow-generated exceptions. 
""" - def __init__(self, sender, error): + def __init__(self, message, task_spec=None): """ Standard exception class. - :param sender: the task spec that threw the exception - :type sender: TaskSpec - :param error: a human readable error message + :param task_spec: the task spec that threw the exception + :type task_spec: TaskSpec + :param error: a human-readable error message :type error: string """ - Exception.__init__(self, str(error)) + super().__init__(str(message)) # Points to the TaskSpec that generated the exception. - self.sender = sender + self.task_spec = task_spec @staticmethod def get_task_trace(task): @@ -45,10 +65,31 @@ class WorkflowException(Exception): task_trace.append(f"{workflow.spec.task_specs[caller].description} ({workflow.spec.file})") return task_trace + @staticmethod + def did_you_mean_from_name_error(name_exception, options): + """Returns a string along the lines of 'did you mean 'dog'? Given + a name_error, and a set of possible things that could have been called, + or an empty string if no valid suggestions come up. """ + if isinstance(name_exception, NameError): + def_match = re.match("name '(.+)' is not defined", str(name_exception)) + if def_match: + bad_variable = re.match("name '(.+)' is not defined", + str(name_exception)).group(1) + most_similar = levenshtein.most_similar(bad_variable, + options, 3) + error_msg = "" + if len(most_similar) == 1: + error_msg += f' Did you mean \'{most_similar[0]}\'?' + if len(most_similar) > 1: + error_msg += f' Did you mean one of \'{most_similar}\'?' + return error_msg + + class WorkflowTaskException(WorkflowException): """WorkflowException that provides task_trace information.""" - def __init__(self, task, error_msg, exception=None): + def __init__(self, error_msg, task=None, exception=None, + line_number=None, offset=None, error_line=None): """ Exception initialization. @@ -60,17 +101,31 @@ class WorkflowTaskException(WorkflowException): :type exception: Exception """ - self.exception = exception self.task = task + self.line_number = line_number + self.offset = offset + self.error_line = error_line + if exception: + self.error_type = exception.__class__.__name__ + else: + self.error_type = "unknown" + super().__init__(error_msg, task_spec=task.task_spec) - # If encountered in a sub-workflow, this traces back up the stack - # so we can tell how we got to this paticular task, no matter how + if isinstance(exception, SyntaxError) and not line_number: + # Line number and offset can be recovered directly from syntax errors, + # otherwise they must be passed in. + self.line_number = exception.lineno + self.offset = exception.offset + elif isinstance(exception, NameError): + self.add_note(self.did_you_mean_from_name_error(exception, list(task.data.keys()))) + + # If encountered in a sub-workflow, this traces back up the stack, + # so we can tell how we got to this particular task, no matter how # deeply nested in sub-workflows it is. 
Takes the form of: # task-description (file-name) self.task_trace = self.get_task_trace(task) - super().__init__(task.task_spec, error_msg) -class StorageException(Exception): +class StorageException(SpiffWorkflowException): pass diff --git a/SpiffWorkflow/operators.py b/SpiffWorkflow/operators.py index 4d6e0213f..a5913b123 100644 --- a/SpiffWorkflow/operators.py +++ b/SpiffWorkflow/operators.py @@ -225,7 +225,7 @@ class Operator(Term): return values def _matches(self, task): - raise Exception("Abstract class, do not call") + raise NotImplementedError("Abstract class, do not call") def serialize(self, serializer): """ diff --git a/SpiffWorkflow/serializer/dict.py b/SpiffWorkflow/serializer/dict.py index 8d1e3ad42..ce27c9c2c 100644 --- a/SpiffWorkflow/serializer/dict.py +++ b/SpiffWorkflow/serializer/dict.py @@ -586,29 +586,17 @@ class DictionarySerializer(Serializer): assert isinstance(workflow, Workflow) s_state = dict() if include_spec: - s_state['wf_spec'] = self.serialize_workflow_spec(workflow.spec, - **kwargs) + s_state['wf_spec'] = self.serialize_workflow_spec(workflow.spec, **kwargs) - # data s_state['data'] = self.serialize_dict(workflow.data) - - # last_node value = workflow.last_task s_state['last_task'] = value.id if value is not None else None - - # outer_workflow - # s_state['outer_workflow'] = workflow.outer_workflow.id - - # success s_state['success'] = workflow.success - - # task_tree s_state['task_tree'] = self.serialize_task(workflow.task_tree) return s_state - def deserialize_workflow(self, s_state, wf_class=Workflow, - read_only=False, wf_spec=None, **kwargs): + def deserialize_workflow(self, s_state, wf_class=Workflow, wf_spec=None, **kwargs): """It is possible to override the workflow class, and specify a workflow_spec, otherwise the spec is assumed to be serialized in the s_state['wf_spec']""" @@ -616,23 +604,9 @@ class DictionarySerializer(Serializer): if wf_spec is None: wf_spec = self.deserialize_workflow_spec(s_state['wf_spec'], **kwargs) workflow = wf_class(wf_spec) - - workflow.read_only = read_only - - # data workflow.data = self.deserialize_dict(s_state['data']) - - # outer_workflow - # workflow.outer_workflow = - # find_workflow_by_id(remap_workflow_id(s_state['outer_workflow'])) - - # success workflow.success = s_state['success'] - - # workflow workflow.spec = wf_spec - - # task_tree workflow.task_tree = self.deserialize_task( workflow, s_state['task_tree']) @@ -641,15 +615,11 @@ class DictionarySerializer(Serializer): for task in tasklist: task.parent = workflow.get_task(task.parent,tasklist) - # last_task workflow.last_task = workflow.get_task(s_state['last_task'],tasklist) - - # task_mapping workflow.update_task_mapping() return workflow - def serialize_task(self, task, skip_children=False, allow_subs=False): """ :param allow_subs: Allows sub-serialization to take place, otherwise diff --git a/SpiffWorkflow/serializer/json.py b/SpiffWorkflow/serializer/json.py index c765d5501..07a8d3486 100644 --- a/SpiffWorkflow/serializer/json.py +++ b/SpiffWorkflow/serializer/json.py @@ -53,14 +53,6 @@ class JSONSerializer(DictionarySerializer): return dct - def _jsonableHandler(self, obj): - if hasattr(obj, 'jsonable'): - return obj.jsonable() - - raise 'Object of type %s with value of %s is not JSON serializable' % ( - type(obj), repr(obj)) - - def _default(self, obj): if isinstance(obj, uuid.UUID): return {'__uuid__': obj.hex} diff --git a/SpiffWorkflow/signavio/parser/bpmn.py b/SpiffWorkflow/signavio/parser/bpmn.py index 5bd16abff..dda808b44 100644 --- 
a/SpiffWorkflow/signavio/parser/bpmn.py +++ b/SpiffWorkflow/signavio/parser/bpmn.py @@ -16,5 +16,5 @@ class SignavioBpmnParser(BpmnParser): 'Intermediate Catch Event has no incoming sequences. ' 'This might be a Boundary Event that has been ' 'disconnected.', - node=catch_event, filename=filename) - return super().add_bpmn_xml(bpmn, filename) \ No newline at end of file + node=catch_event, file_name=filename) + return super().add_bpmn_xml(bpmn, filename) diff --git a/SpiffWorkflow/signavio/parser/tasks.py b/SpiffWorkflow/signavio/parser/tasks.py index a9721cf74..5a9ac6f4b 100644 --- a/SpiffWorkflow/signavio/parser/tasks.py +++ b/SpiffWorkflow/signavio/parser/tasks.py @@ -24,7 +24,7 @@ class CallActivityParser(TaskParser): f"The process '{called_element}' was not found. Did you mean one of the following: " f"{', '.join(self.process_parser.parser.get_process_ids())}?", node=self.node, - filename=self.process_parser.filename) + file_name=self.process_parser.filename) return parser.get_id() def _fix_call_activities(self): @@ -41,5 +41,5 @@ class CallActivityParser(TaskParser): if not signavio_meta_data: raise ValidationException( 'No Signavio "Subprocess reference" specified.', - node=self.node, filename=self.filename) + node=self.node, file_name=self.filename) return one(signavio_meta_data).get('metaValue') diff --git a/SpiffWorkflow/specs/Cancel.py b/SpiffWorkflow/specs/Cancel.py index 913f4d8b3..bac74c222 100644 --- a/SpiffWorkflow/specs/Cancel.py +++ b/SpiffWorkflow/specs/Cancel.py @@ -53,7 +53,7 @@ class Cancel(TaskSpec): """ TaskSpec.test(self) if len(self.outputs) > 0: - raise WorkflowException(self, 'Cancel with an output.') + raise WorkflowException('Cancel with an output.', task_spec=self) def _on_complete_hook(self, my_task): my_task.workflow.cancel(self.cancel_successfully) diff --git a/SpiffWorkflow/specs/ExclusiveChoice.py b/SpiffWorkflow/specs/ExclusiveChoice.py index 00978d7d5..c31fcbe1b 100644 --- a/SpiffWorkflow/specs/ExclusiveChoice.py +++ b/SpiffWorkflow/specs/ExclusiveChoice.py @@ -64,7 +64,7 @@ class ExclusiveChoice(MultiChoice): """ MultiChoice.test(self) if self.default_task_spec is None: - raise WorkflowException(self, 'A default output is required.') + raise WorkflowException('A default output is required.', task_spec=self) def _predict_hook(self, my_task): # If the task's status is not predicted, we default to MAYBE @@ -84,8 +84,7 @@ class ExclusiveChoice(MultiChoice): break if output is None: - raise WorkflowException(self, - f'No conditions satisfied for {my_task.task_spec.name}') + raise WorkflowException(f'No conditions satisfied for {my_task.task_spec.name}', task_spec=self) my_task._sync_children([output], TaskState.FUTURE) diff --git a/SpiffWorkflow/specs/Join.py b/SpiffWorkflow/specs/Join.py index 23be2c792..4a4238ed4 100644 --- a/SpiffWorkflow/specs/Join.py +++ b/SpiffWorkflow/specs/Join.py @@ -154,7 +154,7 @@ class Join(TaskSpec): split_task = my_task._find_ancestor_from_name(self.split_task) if split_task is None: msg = 'Join with %s, which was not reached' % self.split_task - raise WorkflowException(self, msg) + raise WorkflowException(msg, task_spec=self) tasks = split_task.task_spec._get_activated_tasks(split_task, my_task) # The default threshold is the number of branches that were started. 
diff --git a/SpiffWorkflow/specs/LoopResetTask.py b/SpiffWorkflow/specs/LoopResetTask.py index 0e745aa28..582ad987b 100644 --- a/SpiffWorkflow/specs/LoopResetTask.py +++ b/SpiffWorkflow/specs/LoopResetTask.py @@ -57,10 +57,11 @@ class LoopResetTask(TaskSpec): # maybe upstream someone will be able to handle this situation task._set_state(TaskState.WAITING) if isinstance(e, WorkflowTaskException): + e.add_note('Error occurred during a loop back to a previous step.') raise e else: raise WorkflowTaskException( - task, 'Error during loop back:' + str(e), e) + 'Error during loop back:' + str(e), task=task, exception=e) super(LoopResetTask, self)._on_complete_hook(task) def serialize(self, serializer): diff --git a/SpiffWorkflow/specs/MultiChoice.py b/SpiffWorkflow/specs/MultiChoice.py index 3f1cd36fd..f43e6d2fb 100644 --- a/SpiffWorkflow/specs/MultiChoice.py +++ b/SpiffWorkflow/specs/MultiChoice.py @@ -70,14 +70,14 @@ class MultiChoice(TaskSpec): """ TaskSpec.test(self) if len(self.cond_task_specs) < 1: - raise WorkflowException(self, 'At least one output required.') + raise WorkflowException('At least one output required.', task_spec=self) for condition, name in self.cond_task_specs: if name is None: - raise WorkflowException(self, 'Condition with no task spec.') + raise WorkflowException('Condition with no task spec.', task_spec=self) task_spec = self._wf_spec.get_task_spec_from_name(name) if task_spec is None: msg = 'Condition leads to non-existent task ' + repr(name) - raise WorkflowException(self, msg) + raise WorkflowException(msg, task_spec=self) if condition is None: continue diff --git a/SpiffWorkflow/specs/StartTask.py b/SpiffWorkflow/specs/StartTask.py index c79a7d611..5f99a90a5 100644 --- a/SpiffWorkflow/specs/StartTask.py +++ b/SpiffWorkflow/specs/StartTask.py @@ -44,7 +44,7 @@ class StartTask(TaskSpec): """ Called by the previous task to let us know that it exists. """ - raise WorkflowException(self, 'StartTask can not have any inputs.') + raise WorkflowException('StartTask can not have any inputs.', task_spec=self) def test(self): """ @@ -52,9 +52,9 @@ class StartTask(TaskSpec): if an error was detected. 
""" if len(self.inputs) != 0: - raise WorkflowException(self, 'StartTask with an input.') + raise WorkflowException('StartTask with an input.', task_spec=self) elif len(self.outputs) < 1: - raise WorkflowException(self, 'No output task connected.') + raise WorkflowException('No output task connected.', task_spec=self) def serialize(self, serializer): return serializer.serialize_start_task(self) diff --git a/SpiffWorkflow/specs/SubWorkflow.py b/SpiffWorkflow/specs/SubWorkflow.py index 318c224ba..3e18acaed 100644 --- a/SpiffWorkflow/specs/SubWorkflow.py +++ b/SpiffWorkflow/specs/SubWorkflow.py @@ -72,8 +72,7 @@ class SubWorkflow(TaskSpec): def test(self): TaskSpec.test(self) if self.file is not None and not os.path.exists(self.file): - raise WorkflowException( - self, 'File does not exist: %s' % self.file) + raise WorkflowException('File does not exist: %s' % self.file, task_spec=self) def _predict_hook(self, my_task): # Modifying the task spec is a TERRIBLE idea, but if we don't do it, sync_children won't work diff --git a/SpiffWorkflow/specs/ThreadMerge.py b/SpiffWorkflow/specs/ThreadMerge.py index ddbd0d712..e1b48e0cb 100644 --- a/SpiffWorkflow/specs/ThreadMerge.py +++ b/SpiffWorkflow/specs/ThreadMerge.py @@ -63,7 +63,7 @@ class ThreadMerge(Join): split_task = my_task._find_ancestor_from_name(self.split_task) if split_task is None: msg = 'Join with %s, which was not reached' % self.split_task - raise WorkflowException(self, msg) + raise WorkflowException(msg, task_spec=self) tasks = split_task.task_spec._get_activated_threads(split_task) # The default threshold is the number of threads that were started. diff --git a/SpiffWorkflow/specs/WorkflowSpec.py b/SpiffWorkflow/specs/WorkflowSpec.py index 7f084cb3f..6519873f8 100644 --- a/SpiffWorkflow/specs/WorkflowSpec.py +++ b/SpiffWorkflow/specs/WorkflowSpec.py @@ -152,33 +152,35 @@ class WorkflowSpec(object): def recursive_dump(task_spec, indent): if task_spec in done: return '[shown earlier] %s (%s:%s)' % ( - task_spec.name, task_spec.__class__.__name__, - hex(id(task_spec))) + '\n' - + task_spec.name, + task_spec.__class__.__name__, + hex(id(task_spec)) + ) + '\n' done.add(task_spec) dump = '%s (%s:%s)' % ( task_spec.name, - task_spec.__class__.__name__, hex(id(task_spec))) + '\n' + task_spec.__class__.__name__, + hex(id(task_spec)) + ) + '\n' if verbose: if task_spec.inputs: - dump += indent + '- IN: ' + \ - ','.join(['%s (%s)' % (t.name, hex(id(t))) - for t in task_spec.inputs]) + '\n' + dump += indent + \ + '- IN: ' + \ + ','.join(['%s (%s)' % (t.name, hex(id(t))) for t in task_spec.inputs]) + \ + '\n' if task_spec.outputs: - dump += indent + '- OUT: ' + \ - ','.join(['%s (%s)' % (t.name, hex(id(t))) - for t in task_spec.outputs]) + '\n' - sub_specs = ([task_spec.spec.start] if hasattr( - task_spec, 'spec') else []) + task_spec.outputs - for i, t in enumerate(sub_specs): - dump += indent + ' --> ' + \ - recursive_dump( - t, indent + (' | ' if i + 1 < len(sub_specs) else - ' ')) + dump += indent + \ + '- OUT: ' + \ + ','.join(['%s (%s)' % (t.name, hex(id(t))) for t in task_spec.outputs]) + \ + '\n' + # sub_specs = ([task_spec.spec.start] if hasattr(task_spec, 'spec') else []) + task_spec.outputs + for i, t in enumerate(task_spec.outputs): + dump += indent + \ + ' --> ' + \ + recursive_dump(t, indent + (' | ' if i + 1 < len(task_spec.outputs) else ' ')) return dump dump = recursive_dump(self.start, '') - return dump def dump(self): diff --git a/SpiffWorkflow/spiff/specs/spiff_task.py b/SpiffWorkflow/spiff/specs/spiff_task.py index 
bb25823af..c67f3690d 100644 --- a/SpiffWorkflow/spiff/specs/spiff_task.py +++ b/SpiffWorkflow/spiff/specs/spiff_task.py @@ -1,5 +1,6 @@ from copy import deepcopy +from SpiffWorkflow.exceptions import SpiffWorkflowException from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.specs .BpmnSpecMixin import BpmnSpecMixin @@ -36,9 +37,17 @@ class SpiffBpmnTask(BpmnSpecMixin): def _on_ready_hook(self, my_task): super()._on_ready_hook(my_task) if self.prescript is not None: - self.execute_script(my_task, self.prescript) + try: + self.execute_script(my_task, self.prescript) + except SpiffWorkflowException as se: + se.add_note("Error occurred in the Pre-Script") + raise se def _on_complete_hook(self, my_task): if self.postscript is not None: - self.execute_script(my_task, self.postscript) - super()._on_complete_hook(my_task) \ No newline at end of file + try: + self.execute_script(my_task, self.postscript) + except SpiffWorkflowException as se: + se.add_note("Error occurred in the Post-Script") + raise se + super()._on_complete_hook(my_task) diff --git a/SpiffWorkflow/task.py b/SpiffWorkflow/task.py index a214eb5b5..9673287c7 100644 --- a/SpiffWorkflow/task.py +++ b/SpiffWorkflow/task.py @@ -182,8 +182,7 @@ class Task(object, metaclass=DeprecatedMetaTask): # Assure we don't recurse forever. self.count += 1 if self.count > self.MAX_ITERATIONS: - raise WorkflowException(current, - "Task Iterator entered infinite recursion loop" ) + raise WorkflowException("Task Iterator entered infinite recursion loop", task_spec=current) # If the current task has children, the first child is the next @@ -266,8 +265,8 @@ class Task(object, metaclass=DeprecatedMetaTask): def state(self, value): if value < self._state: raise WorkflowException( - self.task_spec, - 'state went from %s to %s!' % (self.get_state_name(), TaskStateNames[value]) + 'state went from %s to %s!' % (self.get_state_name(), TaskStateNames[value]), + task_spec=self.task_spec ) self._set_state(value) @@ -345,8 +344,8 @@ class Task(object, metaclass=DeprecatedMetaTask): if self.is_looping(): self.terminate_current_loop = True else: - raise WorkflowException(self.task_spec, - 'The method terminate_loop should only be called in the case of a BPMN Loop Task') + raise WorkflowException('The method terminate_loop should only be called in the case of a BPMN Loop Task', + task_spec=self) def is_looping(self): """Returns true if this is a looping task.""" @@ -475,7 +474,7 @@ class Task(object, metaclass=DeprecatedMetaTask): raise ValueError(self, '_add_child() requires a TaskSpec') if self._is_predicted() and state & TaskState.PREDICTED_MASK == 0: msg = 'Attempt to add non-predicted child to predicted task' - raise WorkflowException(self.task_spec, msg) + raise WorkflowException(msg, task_spec=self.task_spec) task = Task(self.workflow, task_spec, self, state=state) task.thread_id = self.thread_id if state == TaskState.READY: @@ -551,7 +550,7 @@ class Task(object, metaclass=DeprecatedMetaTask): # Definite tasks must not be removed, so they HAVE to be in the given task spec list. if child._is_definite(): - raise WorkflowException(self.task_spec, f'removal of non-predicted child {child}') + raise WorkflowException(f'removal of non-predicted child {child}', task_spec=self.task_spec) unneeded_children.append(child) # Remove and add the children accordingly. 
diff --git a/SpiffWorkflow/workflow.py b/SpiffWorkflow/workflow.py index 906cfa5c0..6a7836468 100644 --- a/SpiffWorkflow/workflow.py +++ b/SpiffWorkflow/workflow.py @@ -257,7 +257,7 @@ class Workflow(object): :param task_id: The id of the Task object. """ if task_id is None: - raise WorkflowException(self.spec, 'task_id is None') + raise WorkflowException('task_id is None', task_spec=self.spec) data = {} if self.last_task and self.last_task.data: data = self.last_task.data @@ -265,7 +265,7 @@ if task.id == task_id: return task.reset_token(data) msg = 'A task with the given task_id (%s) was not found' % task_id - raise WorkflowException(self.spec, msg) + raise WorkflowException(msg, task_spec=self.spec) def get_reset_task_spec(self, destination): """ @@ -300,12 +300,12 @@ :param task_id: The id of the Task object. """ if task_id is None: - raise WorkflowException(self.spec, 'task_id is None') + raise WorkflowException('task_id is None', task_spec=self.spec) for task in self.task_tree: if task.id == task_id: return task.complete() msg = 'A task with the given task_id (%s) was not found' % task_id - raise WorkflowException(self.spec, msg) + raise WorkflowException(msg, task_spec=self.spec) def complete_next(self, pick_up=True, halt_on_manual=True): """ diff --git a/doc/errors.rst b/doc/errors.rst new file mode 100644 index 000000000..31c606de2 --- /dev/null +++ b/doc/errors.rst @@ -0,0 +1,90 @@ +SpiffWorkflow Exceptions +======================== +Details about the exceptions and the exception hierarchy within SpiffWorkflow. + +SpiffWorkflowException +---------------------- +Base exception for all exceptions raised by SpiffWorkflow. + +ValidationException +------------------- + +**Extends** +SpiffWorkflowException + +Thrown during the parsing of a workflow. + +**Attributes/Methods** + +- **tag**: the type of xml tag being parsed. +- **id**: the id attribute of the xml tag, if available. +- **name**: the name attribute of the xml tag, if available. +- **line_number**: the line number where the tag occurs. +- **file_name**: the name of the file where the error occurred. +- **message**: a human-readable error message. + + +WorkflowException +----------------- +When an error occurs with a Task Specification (maybe should have been called +a SpecException). + +**Extends** +SpiffWorkflowException + +**Attributes/Methods** + +- **task_spec**: the TaskSpec - the specific Task, Gateway, etc. that caused the error to happen. +- **message**: a human-readable error message describing the problem. +- **get_task_trace**: given a specific Task, works its way through the workflow / sub-processes +and call activities to show where an error occurred. Useful if the error happened within a deeply nested structure (where call activities include call activities ....) + +WorkflowDataException +--------------------- +When an exception occurs moving data between tasks and Data Objects (including +data inputs and data outputs). + +**Extends** +WorkflowException + +**Attributes/Methods** + +(in addition to the values in a WorkflowException) + + - **task**: the specific task (not the task spec, but the actual executing task) + - **data_input**: the spec of the input variable + - **data_output**: the spec of the output variable + +WorkflowTaskException +--------------------- +**Extends** +WorkflowException + +**Attributes/Methods** + +(in addition to the values in a WorkflowException) + + - **task**: the specific task (not the task spec, but the actual executing task) + - **error_msg**: the detailed, human-readable message (overlaps with **message** above) + - **exception**: the original exception this wraps around + - **line_number**: the line number that contains the error + - **offset**: the point in the line that caused the error + - **error_line**: the content of the line that caused the error + +It will accept line_number, offset, and error_line as arguments; if the +underlying error is a SyntaxError, it will try to derive this +information from the error itself. +If it is a NameError, it will attempt to calculate a did-you-mean +suggestion and add it as a note.
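+
+Example
+-------
+A minimal sketch of catching these exceptions at the point where a workflow
+is run (``workflow`` is assumed here to be an already-constructed
+``BpmnWorkflow`` instance)::
+
+    from SpiffWorkflow.exceptions import (SpiffWorkflowException,
+                                          WorkflowTaskException)
+
+    try:
+        workflow.do_engine_steps()
+    except WorkflowTaskException as ex:
+        # Task-level failures carry the failing task spec plus, for script
+        # errors, the source position; notes such as the did-you-mean
+        # suggestion are folded into str(ex).
+        print(ex.task_spec.name, ex.line_number, ex.error_line)
+        print(str(ex))
+    except SpiffWorkflowException as ex:
+        # Anything else raised by the library.
+        print(str(ex))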
+
+Unused / Deprecated errors
+--------------------------
+
+**StorageException**
+Deprecated -- used only by the PrettyXmlSerializer, which is not under active
+support.
+
+**DeadMethodCalled**
+Related to WeakMethod, which no longer appears to be used.
+
+ diff --git a/graphics/spiffworkflow_logo_ideas.svg b/graphics/spiffworkflow_logo_ideas.svg index 28e5d8ac5..b51b8b521 100644 --- a/graphics/spiffworkflow_logo_ideas.svg +++ b/graphics/spiffworkflow_logo_ideas.svg @@ -26,13 +26,13 @@ showgrid="false" showguides="true" inkscape:guide-bbox="true" - inkscape:zoom="0.27433373" - inkscape:cx="-586.87643" - inkscape:cy="1882.7433" + inkscape:zoom="1.5518659" + inkscape:cx="2265.0153" + inkscape:cy="3541.2209" inkscape:window-width="1916" - inkscape:window-height="1076" + inkscape:window-height="916" inkscape:window-x="0" - inkscape:window-y="0" + inkscape:window-y="162" inkscape:window-maximized="1" inkscape:current-layer="layer1"> @@ -839,6 +851,91 @@ [SVG markup lost in extraction: this hunk adds new logo elements; only the text fragments "Draw the code" survive] diff --git a/tests/SpiffWorkflow/bpmn/ApprovalsTest.py b/tests/SpiffWorkflow/bpmn/ApprovalsTest.py index 7576a23fe..857c81f07 100644 --- a/tests/SpiffWorkflow/bpmn/ApprovalsTest.py +++ b/tests/SpiffWorkflow/bpmn/ApprovalsTest.py @@ -2,7 +2,6 @@ import unittest from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'matth' @@ -86,8 +85,7 @@ class ApprovalsTest(BpmnWorkflowTestCase): self.save_restore() self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval') self.do_next_exclusive_step('Parallel_Approvals_SP.Step1') - self.do_next_exclusive_step( - 'Parallel_Approvals_SP.Supervisor_Approval') + self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval') self.do_next_exclusive_step('Approvals.Parallel_SP_Done') def testSaveRestoreWaiting(self): @@ -108,93 +106,10 @@ class ApprovalsTest(BpmnWorkflowTestCase): self.save_restore() self.do_next_exclusive_step('Parallel_Approvals_SP.Step1') self.save_restore() - self.do_next_exclusive_step( - 'Parallel_Approvals_SP.Supervisor_Approval') + self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval') self.save_restore() 
self.do_next_exclusive_step('Approvals.Parallel_SP_Done') - def testReadonlyWaiting(self): - - self.do_next_named_step('First_Approval_Wins.Manager_Approval') - - readonly = self.get_read_only_workflow() - self.assertEqual(1, len(readonly.get_ready_user_tasks())) - self.assertEqual('Approvals.First_Approval_Wins_Done', - readonly.get_ready_user_tasks()[0].task_spec.name) - self.assertRaises(AssertionError, readonly.do_engine_steps) - self.assertRaises(AssertionError, readonly.refresh_waiting_tasks) - self.assertRaises(AssertionError, readonly.catch, MessageEventDefinition('Cheese')) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - - self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done') - - readonly = self.get_read_only_workflow() - self.assertEqual(2, len(readonly.get_ready_user_tasks())) - self.assertEqual( - ['Approvals.Manager_Approval__P_', - 'Approvals.Supervisor_Approval__P_'], - sorted(t.task_spec.name for t in readonly.get_ready_user_tasks())) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - - self.do_next_named_step('Approvals.Supervisor_Approval__P_') - - readonly = self.get_read_only_workflow() - self.assertEqual(1, len(readonly.get_ready_user_tasks())) - self.assertEqual('Approvals.Manager_Approval__P_', - readonly.get_ready_user_tasks()[0].task_spec.name) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - self.do_next_named_step('Approvals.Manager_Approval__P_') - - readonly = self.get_read_only_workflow() - self.assertEqual(1, len(readonly.get_ready_user_tasks())) - self.assertEqual('Approvals.Parallel_Approvals_Done', - readonly.get_ready_user_tasks()[0].task_spec.name) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - self.do_next_exclusive_step('Approvals.Parallel_Approvals_Done') - - readonly = self.get_read_only_workflow() - self.assertEqual(2, len(readonly.get_ready_user_tasks())) - self.assertEqual( - ['Parallel_Approvals_SP.Manager_Approval', - 'Parallel_Approvals_SP.Step1'], - sorted(t.task_spec.name for t in readonly.get_ready_user_tasks())) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval') - - readonly = self.get_read_only_workflow() - self.assertEqual(1, len(readonly.get_ready_user_tasks())) - self.assertEqual('Parallel_Approvals_SP.Step1', - readonly.get_ready_user_tasks()[0].task_spec.name) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - self.do_next_exclusive_step('Parallel_Approvals_SP.Step1') - - readonly = self.get_read_only_workflow() - self.assertEqual(1, len(readonly.get_ready_user_tasks())) - self.assertEqual('Parallel_Approvals_SP.Supervisor_Approval', - readonly.get_ready_user_tasks()[0].task_spec.name) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - self.do_next_exclusive_step( - 'Parallel_Approvals_SP.Supervisor_Approval') - - readonly = self.get_read_only_workflow() - self.assertEqual(1, len(readonly.get_ready_user_tasks())) - self.assertEqual('Approvals.Parallel_SP_Done', - readonly.get_ready_user_tasks()[0].task_spec.name) - self.assertRaises( - AssertionError, readonly.get_ready_user_tasks()[0].complete) - self.do_next_exclusive_step('Approvals.Parallel_SP_Done') - - readonly = self.get_read_only_workflow() - self.assertEqual(0, len(readonly.get_ready_user_tasks())) - self.assertEqual(0, 
len(readonly.get_waiting_tasks())) - def suite(): return unittest.TestLoader().loadTestsFromTestCase(ApprovalsTest) diff --git a/tests/SpiffWorkflow/bpmn/BpmnSerializerTest.py b/tests/SpiffWorkflow/bpmn/BpmnSerializerTest.py deleted file mode 100644 index a8b6ebd53..000000000 --- a/tests/SpiffWorkflow/bpmn/BpmnSerializerTest.py +++ /dev/null @@ -1,118 +0,0 @@ -import os -import unittest - -from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine -from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer -from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from .BpmnLoaderForTests import TestBpmnParser - - -class BpmnSerializerTest(unittest.TestCase): - CORRELATE = BpmnSerializer - - def load_workflow_spec(self, filename, process_name): - f = os.path.join(os.path.dirname(__file__), 'data', filename) - parser = TestBpmnParser() - parser.add_bpmn_files_by_glob(f) - top_level_spec = parser.get_spec(process_name) - subprocesses = parser.get_subprocess_specs(process_name) - return top_level_spec, subprocesses - - def setUp(self): - super(BpmnSerializerTest, self).setUp() - self.serializer = BpmnSerializer() - self.spec, subprocesses = self.load_workflow_spec('random_fact.bpmn', 'random_fact') - self.workflow = BpmnWorkflow(self.spec, subprocesses) - - def testDeserializeWorkflowSpec(self): - self.assertIsNotNone(self.spec) - - def testSerializeWorkflowSpec(self): - spec_serialized = self.serializer.serialize_workflow_spec(self.spec) - result = self.serializer.deserialize_workflow_spec(spec_serialized) - spec_serialized2 = self.serializer.serialize_workflow_spec(result) - self.assertEqual(spec_serialized, spec_serialized2) - - def testSerializeWorkflow(self): - json = self.serializer.serialize_workflow(self.workflow) - print(json) - - def testDeserializeWorkflow(self): - self._compare_with_deserialized_copy(self.workflow) - - def testDeserializeCallActivityChildren(self): - """Tested as a part of deserialize workflow.""" - pass - - def testSerializeTask(self): - json = self.serializer.serialize_workflow(self.workflow) - print(json) - - def testDeserializeTask(self): - self._compare_with_deserialized_copy(self.workflow) - - def testDeserializeActiveWorkflow(self): - self.workflow.do_engine_steps() - self._compare_with_deserialized_copy(self.workflow) - - def testDeserializeWithData(self): - self.workflow.data["test"] = "my_test" - json = self.serializer.serialize_workflow(self.workflow) - wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec) - self.assertEqual('my_test', wf2.get_data("test")) - - def testDeserializeWithDefaultScriptEngineClass(self): - json = self.serializer.serialize_workflow(self.workflow) - wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec) - self.assertIsNotNone(self.workflow.script_engine) - self.assertIsNotNone(wf2.script_engine) - self.assertEqual(self.workflow.script_engine.__class__, - wf2.script_engine.__class__) - - @unittest.skip("Deserialize does not persist the script engine, Fix me.") - def testDeserializeWithCustomScriptEngine(self): - class CustomScriptEngine(PythonScriptEngine): - pass - - self.workflow.script_engine = CustomScriptEngine() - json = self.serializer.serialize_workflow(self.workflow) - wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec) - self.assertEqual(self.workflow.script_engine.__class__, - wf2.script_engine.__class__) - - def testDeserializeWithDataOnTask(self): - self.workflow.do_engine_steps() - user_task = self.workflow.get_ready_user_tasks()[0] - 
user_task.data = {"test":"my_test"} - self._compare_with_deserialized_copy(self.workflow) - - def testLastTaskIsSetAndWorksThroughRestore(self): - self.workflow.do_engine_steps() - json = self.serializer.serialize_workflow(self.workflow) - wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec) - self.assertIsNotNone(self.workflow.last_task) - self.assertIsNotNone(wf2.last_task) - self._compare_workflows(self.workflow, wf2) - - def _compare_with_deserialized_copy(self, wf): - json = self.serializer.serialize_workflow(wf) - wf2 = self.serializer.deserialize_workflow(json, workflow_spec=self.spec) - self._compare_workflows(wf, wf2) - - def _compare_workflows(self, w1, w2): - self.assertIsInstance(w1, BpmnWorkflow) - self.assertIsInstance(w2, BpmnWorkflow) - self.assertEqual(w1.data, w2.data) - self.assertEqual(w1.name, w2.name) - for task in w1.get_ready_user_tasks(): - w2_task = w2.get_task(task.id) - self.assertIsNotNone(w2_task) - self.assertEqual(task.data, w2_task.data) - - -def suite(): - return unittest.TestLoader().loadTestsFromTestCase(BpmnSerializerTest) - - -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/tests/SpiffWorkflow/bpmn/BpmnWorkflowSerializerTest.py b/tests/SpiffWorkflow/bpmn/BpmnWorkflowSerializerTest.py index ac2ae463c..e5b771564 100644 --- a/tests/SpiffWorkflow/bpmn/BpmnWorkflowSerializerTest.py +++ b/tests/SpiffWorkflow/bpmn/BpmnWorkflowSerializerTest.py @@ -1,13 +1,11 @@ import os import unittest import json -from uuid import uuid4 from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer -from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter @@ -48,13 +46,6 @@ class BpmnWorkflowSerializerTest(unittest.TestCase): version = self.serializer.get_version(spec_serialized) self.assertEqual(version, self.SERIALIZER_VERSION) - def testSerializeToOldSerializerThenNewSerializer(self): - old_serializer = BpmnSerializer() - old_json = old_serializer.serialize_workflow(self.workflow) - new_workflow = old_serializer.deserialize_workflow(old_json) - new_json = self.serializer.serialize_json(new_workflow) - new_workflow_2 = self.serializer.deserialize_json(new_json) - def testSerializeWorkflow(self): serialized = self.serializer.serialize_json(self.workflow) json.loads(serialized) diff --git a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py index 9b7865bd6..8f2f0af53 100644 --- a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py +++ b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py @@ -122,7 +122,7 @@ class BpmnWorkflowTestCase(unittest.TestCase): before_dump = self.workflow.get_dump() # Check that we can actully convert this to JSON json_str = json.dumps(before_state) - after = self.serializer.workflow_from_dict(json.loads(json_str), read_only=False) + after = self.serializer.workflow_from_dict(json.loads(json_str)) # Check that serializing and deserializing results in the same workflow after_state = self.serializer.workflow_to_dict(after) after_dump = after.get_dump() @@ -132,11 +132,7 @@ class BpmnWorkflowTestCase(unittest.TestCase): self.workflow = after def restore(self, state): - self.workflow = 
self.serializer.workflow_from_dict(state, read_only=False) - - def get_read_only_workflow(self): - state = self._get_workflow_state() - return self.serializer.workflow_from_dict(state, read_only=True) + self.workflow = self.serializer.workflow_from_dict(state) def _get_workflow_state(self, do_steps=True): if do_steps: diff --git a/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py b/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py index f3d1522da..1e3d158e7 100644 --- a/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py +++ b/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py @@ -3,9 +3,9 @@ import unittest from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine -from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException from SpiffWorkflow.bpmn.workflow import BpmnWorkflow +from SpiffWorkflow.exceptions import WorkflowTaskException from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -60,7 +60,7 @@ class CallActivityTest(BpmnWorkflowTestCase): def test_call_acitivity_errors_include_task_trace(self): error_spec = self.subprocesses.get('ErroringBPMN') error_spec, subprocesses = self.load_workflow_spec('call_activity_*.bpmn', 'ErroringBPMN') - with self.assertRaises(WorkflowTaskExecException) as context: + with self.assertRaises(WorkflowTaskException) as context: self.workflow = BpmnWorkflow(error_spec, subprocesses) self.workflow.do_engine_steps() self.assertEquals(2, len(context.exception.task_trace)) diff --git a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py index 12f69a2aa..8cbca47f5 100644 --- a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py +++ b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- import unittest +from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'McDonald, danfunk' @@ -46,7 +46,7 @@ class CustomInlineScriptTest(BpmnWorkflowTestCase): def test_overwrite_function_with_local_variable(self): ready_task = self.workflow.get_tasks(TaskState.READY)[0] ready_task.data = {'custom_function': "bill"} - with self.assertRaises(WorkflowTaskExecException) as e: + with self.assertRaises(WorkflowTaskException) as e: self.workflow.do_engine_steps() self.assertTrue('' in str(e.exception)) self.assertTrue('custom_function' in str(e.exception)) diff --git a/tests/SpiffWorkflow/bpmn/InvalidWorkflowsTest.py b/tests/SpiffWorkflow/bpmn/InvalidWorkflowsTest.py index ffb8f0242..9987567a5 100644 --- a/tests/SpiffWorkflow/bpmn/InvalidWorkflowsTest.py +++ b/tests/SpiffWorkflow/bpmn/InvalidWorkflowsTest.py @@ -31,18 +31,11 @@ class InvalidWorkflowsTest(BpmnWorkflowTestCase): except ValidationException as ex: self.assertTrue('No start event found' in ('%r' % ex), '\'No start event found\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue('No-Start-Event.bpmn20.xml' in ('%r' % ex), + self.assertTrue('No-Start-Event.bpmn20.xml' in ex.file_name, '\'No-Start-Event.bpmn20.xml\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue('process' in ('%r' % ex), - '\'process\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue( - 
'sid-669ddebf-4196-41ee-8b04-bcc90bc5f983' in ('%r' % ex), - '\'sid-669ddebf-4196-41ee-8b04-bcc90bc5f983\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue('No Start Event' in ('%r' % ex), - '\'No Start Event\' should be a substring of error message: \'%r\'' % ex) def testSubprocessNotFound(self): - + with self.assertRaises(ValidationException) as exc: self.load_workflow_spec('Invalid-Workflows/Subprocess-Not-Found.bpmn20.xml', 'Subprocess Not Found') self.assertIn("The process 'Missing subprocess' was not found.", str(exc)) @@ -60,15 +53,12 @@ 'There is no support implemented for this task type' in ( '%r' % ex), '\'There is no support implemented for this task type\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue('Unsupported-Task.bpmn20.xml' in ('%r' % ex), + self.assertTrue('Unsupported-Task.bpmn20.xml' in ex.file_name, '\'Unsupported-Task.bpmn20.xml\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue('businessRuleTask' in ('%r' % ex), - '\'businessRuleTask\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue( - 'sid-75EEAB28-3B69-4282-B91A-0F3C97931834' in ('%r' % ex), - '\'sid-75EEAB28-3B69-4282-B91A-0F3C97931834\' should be a substring of error message: \'%r\'' % ex) - self.assertTrue('Business Rule Task' in ('%r' % ex), - '\'Business Rule Task\' should be a substring of error message: \'%r\'' % ex) + self.assertTrue('businessRuleTask' in ex.tag, + '\'businessRuleTask\' should be a substring of the tag: \'%r\'' % ex) + self.assertTrue('Business Rule Task' in ex.name, + '\'Business Rule Task\' should be the name: \'%s\'' % ex.name) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/ParserTest.py b/tests/SpiffWorkflow/bpmn/ParserTest.py index 5703273e8..59a327758 100644 --- a/tests/SpiffWorkflow/bpmn/ParserTest.py +++ b/tests/SpiffWorkflow/bpmn/ParserTest.py @@ -1,7 +1,8 @@ import unittest import os -from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser +from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser, BpmnValidator +from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException class ParserTest(unittest.TestCase): @@ -27,3 +28,17 @@ self.assertEqual(generate.data_output_associations[0].name, 'obj_1') self.assertEqual(len(read.data_input_associations), 1) self.assertEqual(read.data_input_associations[0].name, 'obj_1') + + def testValidatorError(self): + parser = BpmnParser(validator=BpmnValidator()) + bpmn_file = os.path.join(os.path.dirname(__file__), 'data', + 'data_object_invalid.bpmn') + errored = False + try: + parser.add_bpmn_file(bpmn_file) + except ValidationException as ex: + errored = True + self.assertEqual(ex.file_name, bpmn_file) + self.assertEqual(14, ex.line_number) + self.assertIn('DataObjectReference_0cm8dnh', str(ex)) + self.assertTrue(errored, "This should have errored out with a validation exception.") diff --git a/tests/SpiffWorkflow/bpmn/ScriptTest.py b/tests/SpiffWorkflow/bpmn/ScriptTest.py index efe9f1c85..da0a4a29f 100644 --- a/tests/SpiffWorkflow/bpmn/ScriptTest.py +++ b/tests/SpiffWorkflow/bpmn/ScriptTest.py @@ -2,7 +2,7 @@ import unittest -from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException +from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase @@ -39,7 +39,7 @@ class
InlineScriptTest(BpmnWorkflowTestCase): # StartTask doesn't know about testvar, it happened earlier. # calling an exec that references testvar, in the context of the # start task should fail. - with self.assertRaises(WorkflowTaskExecException): + with self.assertRaises(WorkflowTaskException): result = self.workflow.script_engine.evaluate(startTask, 'testvar == True') diff --git a/tests/SpiffWorkflow/bpmn/ServiceTaskTest.py b/tests/SpiffWorkflow/bpmn/ServiceTaskTest.py index 290607081..a0112efea 100644 --- a/tests/SpiffWorkflow/bpmn/ServiceTaskTest.py +++ b/tests/SpiffWorkflow/bpmn/ServiceTaskTest.py @@ -8,16 +8,15 @@ sys.path.insert(0, os.path.join(dirname, '..', '..', '..')) from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase class ServiceTaskTest(BpmnWorkflowTestCase): def setUp(self): - spec, subprocesses = self.load_workflow_spec('service_task.bpmn', + spec, subprocesses = self.load_workflow_spec('service_task.bpmn', 'service_task_example1') - self.workflow = BpmnWorkflow(spec, subprocesses) + self.workflow = BpmnWorkflow(spec, subprocesses) def testRunThroughHappy(self): self.workflow.do_engine_steps() diff --git a/tests/SpiffWorkflow/bpmn/data/data_object_invalid.bpmn b/tests/SpiffWorkflow/bpmn/data/data_object_invalid.bpmn new file mode 100644 index 000000000..1d2d2ed87 --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/data/data_object_invalid.bpmn @@ -0,0 +1,152 @@ [BPMN XML markup lost in extraction: a new 152-line test diagram; the surviving text fragments name the sequence flows Flow_18858hr, Flow_19pyf8s, Flow_1r7v9yo, Flow_1tnu3ej, Flow_0gbxq9s, Flow_0yx8lkz and Flow_0rk4i35, and the data object references DataObjectReference_0pztwm3, DataObjectReference_17fhr1j and DataObjectReference_0cm8dnh with properties Property_1uusomz and Property_1q5wp77] diff --git a/tests/SpiffWorkflow/bpmn/serializer/dictTest.py b/tests/SpiffWorkflow/bpmn/serializer/dictTest.py deleted file mode 100644 index 3556bee19..000000000 --- a/tests/SpiffWorkflow/bpmn/serializer/dictTest.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- - -from builtins import str -import sys -import unittest -import os -dirname = os.path.dirname(__file__) -sys.path.insert(0, os.path.join(dirname, '..', '..', '..', '..')) - -import uuid -from SpiffWorkflow.bpmn.serializer.dict import BPMNDictionarySerializer -from tests.SpiffWorkflow.serializer.baseTest import SerializerTest -from SpiffWorkflow.workflow import Workflow - - -class BPMNDictionarySerializerTest(SerializerTest): - - def setUp(self): - super(BPMNDictionarySerializerTest, self).setUp() - self.serializer = BPMNDictionarySerializer() - self.return_type = dict - - def _compare_results(self, item1, item2, - exclude_dynamic=False, - exclude_items=None): - exclude_items = exclude_items if exclude_items is not None else [] - if exclude_dynamic: - if 'last_state_change' not in exclude_items: - exclude_items.append('last_state_change') - if 'last_task' not in exclude_items: - exclude_items.append('last_task') - if uuid.UUID not in exclude_items: - exclude_items.append(uuid.UUID) - if type(item1) in exclude_items: - return - - if isinstance(item1, 
diff --git a/tests/SpiffWorkflow/bpmn/serializer/dictTest.py b/tests/SpiffWorkflow/bpmn/serializer/dictTest.py
deleted file mode 100644
index 3556bee19..000000000
--- a/tests/SpiffWorkflow/bpmn/serializer/dictTest.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from builtins import str
-import sys
-import unittest
-import os
-dirname = os.path.dirname(__file__)
-sys.path.insert(0, os.path.join(dirname, '..', '..', '..', '..'))
-
-import uuid
-from SpiffWorkflow.bpmn.serializer.dict import BPMNDictionarySerializer
-from tests.SpiffWorkflow.serializer.baseTest import SerializerTest
-from SpiffWorkflow.workflow import Workflow
-
-
-class BPMNDictionarySerializerTest(SerializerTest):
-
-    def setUp(self):
-        super(BPMNDictionarySerializerTest, self).setUp()
-        self.serializer = BPMNDictionarySerializer()
-        self.return_type = dict
-
-    def _compare_results(self, item1, item2,
-                         exclude_dynamic=False,
-                         exclude_items=None):
-        exclude_items = exclude_items if exclude_items is not None else []
-        if exclude_dynamic:
-            if 'last_state_change' not in exclude_items:
-                exclude_items.append('last_state_change')
-            if 'last_task' not in exclude_items:
-                exclude_items.append('last_task')
-            if uuid.UUID not in exclude_items:
-                exclude_items.append(uuid.UUID)
-        if type(item1) in exclude_items:
-            return
-
-        if isinstance(item1, dict):
-            self.assertIsInstance(item2, dict)
-            for key, value in list(item1.items()):
-                self.assertIn(key, item2)
-                if key in exclude_items:
-                    continue
-                self._compare_results(value, item2[key],
-                                      exclude_dynamic=exclude_dynamic,
-                                      exclude_items=exclude_items)
-            for key in item2:
-                self.assertIn(key, item1)
-
-        elif isinstance(item1, list):
-            msg = "item is not a list (is a " + str(type(item2)) + ")"
-            self.assertIsInstance(item2, list, msg)
-            msg = "list lengths differ: {} vs {}".format(
-                len(item1), len(item2))
-            self.assertEqual(len(item1), len(item2), msg)
-            for i, listitem in enumerate(item1):
-                self._compare_results(listitem, item2[i],
-                                      exclude_dynamic=exclude_dynamic,
-                                      exclude_items=exclude_items)
-
-        elif isinstance(item1, Workflow):
-            raise Exception("Item is a Workflow")
-
-        else:
-            msg = "{}: types differ: {} vs {}".format(
-                str(item2), type(item1), type(item2))
-            self.assertEqual(type(item1), type(item2), msg)
-            self.assertEqual(item1, item2)
-
-
-def suite():
-    return unittest.defaultTestLoader.loadTestsFromTestCase(BPMNDictionarySerializerTest)
-if __name__ == '__main__':
-    unittest.TextTestRunner(verbosity=2).run(suite())
diff --git a/tests/SpiffWorkflow/bpmn/serializer/jsonTest.py b/tests/SpiffWorkflow/bpmn/serializer/jsonTest.py
deleted file mode 100644
index 89ee05428..000000000
--- a/tests/SpiffWorkflow/bpmn/serializer/jsonTest.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import sys
-import unittest
-import os
-dirname = os.path.dirname(__file__)
-sys.path.insert(0, os.path.join(dirname, '..', '..', '..', '..'))
-
-import json
-from SpiffWorkflow.bpmn.serializer.json import BPMNJSONSerializer
-from tests.SpiffWorkflow.serializer.dictTest import DictionarySerializerTest
-
-
-class BPMNJSONSerializerTest(DictionarySerializerTest):
-
-    def setUp(self):
-        super(BPMNJSONSerializerTest, self).setUp()
-        self.serializer = BPMNJSONSerializer()
-        self.return_type = str
-
-    def _prepare_result(self, item):
-        return json.loads(item)
-
-    def _compare_results(self, item1, item2, exclude_dynamic=False,
-                         exclude_items=None):
-        if exclude_dynamic:
-            exclude_items = ['__uuid__']
-        else:
-            exclude_items = []
-        super(BPMNJSONSerializerTest, self)._compare_results(item1, item2,
-                                                             exclude_dynamic=exclude_dynamic,
-                                                             exclude_items=exclude_items)
-
-
-def suite():
-    return unittest.defaultTestLoader.loadTestsFromTestCase(BPMNJSONSerializerTest)
-if __name__ == '__main__':
-    unittest.TextTestRunner(verbosity=2).run(suite())
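With both legacy serializer test suites deleted, dictionary/JSON round-tripping of BPMN workflows falls to the remaining serializer. A rough sketch of the replacement path, assuming the BpmnWorkflowSerializer API in SpiffWorkflow/bpmn/serializer/workflow.py (that module is touched by this patch but its hunks are not shown here); 'workflow' is assumed to be an existing BpmnWorkflow:

    from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer

    # Build a spec converter, then serialize and restore a workflow.
    spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter()
    serializer = BpmnWorkflowSerializer(spec_converter)

    json_str = serializer.serialize_json(workflow)
    restored = serializer.deserialize_json(json_str)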
diff --git a/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py b/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py
index 3ca9f3d05..757767d68 100644
--- a/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py
+++ b/tests/SpiffWorkflow/camunda/InvalidBusinessRuleTaskParserTest.py
@@ -1,7 +1,7 @@
 import os
 import unittest

-from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
+from SpiffWorkflow.exceptions import SpiffWorkflowException, WorkflowException
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

@@ -15,17 +15,23 @@ class BusinessRuleTaskParserTest(BaseTestCase):
             'invalid/InvalidDecision.bpmn', 'Process_1', 'invalid_decision.dmn')
         self.workflow = BpmnWorkflow(self.spec)

+    def testExceptionPrint(self):
+        e1 = Exception("test 1")
+        print(e1)
+        e = SpiffWorkflowException("test")
+        print(e)
+
     def testDmnRaisesTaskErrors(self):
         self.workflow = BpmnWorkflow(self.spec)
         self.workflow.get_tasks(TaskState.READY)[0].set_data(x=3)
         try:
             self.workflow.do_engine_steps()
             self.assertTrue(False, "An error should have been raised.")
-        except WorkflowTaskExecException as we:
+        except WorkflowException as we:
             self.assertTrue(True, "An error was raised..")
-            self.assertEquals("InvalidDecisionTaskId", we.sender.name)
-            self.maxDiff = 1000
-            self.assertEquals("Error evaluating expression spam= 1", str(we))
+            self.assertEqual("InvalidDecisionTaskId", we.task_spec.name)
+            self.maxDiff = 1000
+            self.assertEqual("Error evaluating expression 'spam= 1'. Rule failed on row 1. Business Rule Task 'Invalid Decision'.", str(we))

 def suite():
     return unittest.TestLoader().loadTestsFromTestCase(BusinessRuleTaskParserTest)
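The reworked test above shows the shape of DMN failures after this change: the raised exception is a WorkflowException whose task_spec attribute replaces the old sender, and whose message names the expression, the failing rule row, and the Business Rule Task. A minimal sketch; 'workflow' is assumed to be a BpmnWorkflow built over an invalid decision, as in setUp:

    from SpiffWorkflow.exceptions import WorkflowException

    try:
        workflow.do_engine_steps()
    except WorkflowException as we:
        print(we.task_spec.name)   # e.g. 'InvalidDecisionTaskId'
        # e.g. "Error evaluating expression 'spam= 1'. Rule failed on row 1.
        #       Business Rule Task 'Invalid Decision'."
        print(str(we))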
diff --git a/tests/SpiffWorkflow/camunda/data/DMNMultiInstance.bpmn b/tests/SpiffWorkflow/camunda/data/DMNMultiInstance.bpmn
index ddf2c44e5..d00fd87d2 100644
--- a/tests/SpiffWorkflow/camunda/data/DMNMultiInstance.bpmn
+++ b/tests/SpiffWorkflow/camunda/data/DMNMultiInstance.bpmn
@@ -1,5 +1,5 @@
[Both hunks of this diff were lost in extraction: the XML tags were stripped, leaving only '+'/'-' markers and stray text nodes (Flow_1b29lxw, "of documentation"). The visible remnants indicate a rewritten header line and a reworked block of extension-element markup in the second hunk (@@ -46,59 +46,59 @@).]
diff --git a/tests/SpiffWorkflow/camunda/data/dmn/test_integer_decision_multi.dmn b/tests/SpiffWorkflow/camunda/data/dmn/test_integer_decision_multi.dmn
index 7565b4c0e..a24c84000 100644
--- a/tests/SpiffWorkflow/camunda/data/dmn/test_integer_decision_multi.dmn
+++ b/tests/SpiffWorkflow/camunda/data/dmn/test_integer_decision_multi.dmn
@@ -1,8 +1,8 @@
[Hunk lost in extraction: XML tags stripped. The remnants ('-', '+', '-', '+', and the text node 'item.x') indicate two header/definition lines were rewritten while the 'item.x' input expression survives as context.]
diff --git a/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py b/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
index 33f621912..3de8fa2a6 100644
--- a/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
+++ b/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
@@ -1,9 +1,7 @@
-import json
 import unittest

-from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, \
-    EnumFormField
-from SpiffWorkflow.specs.base import TaskSpec
+from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, EnumFormField
+from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
 from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec

@@ -13,7 +11,6 @@ class UserTaskSpecTest(unittest.TestCase):
     def create_instance(self):
         if 'testtask' in self.wf_spec.task_specs:
             del self.wf_spec.task_specs['testtask']
-        task_spec = TaskSpec(self.wf_spec, 'testtask', description='foo')
         self.form = Form()
         return UserTask(self.wf_spec, 'userTask', self.form)

@@ -33,43 +30,6 @@ class UserTaskSpecTest(unittest.TestCase):
         self.assertEqual(self.form, self.user_spec.form)

     def testSerialize(self):
-        pass
-
-    def test_text_field(self):
-        form_field = FormField(form_type="text")
-        form_field.id = "1234"
-        self.form.add_field(form_field)
-        self.assertEqual(form_field, self.user_spec.form.fields[0])
-
-    def test_enum_field(self):
-        enum_field = EnumFormField()
-        enum_field.label = "Which kind of fool are you"
-        enum_field.add_option('old fool', 'This is old, therefor it is good.')
-        enum_field.add_option('new fool',
-                              'This is new, therefor it is better.')
-        self.form.add_field(enum_field)
-        self.assertEqual(enum_field, self.user_spec.form.fields[-1])
-
-    def test_properties(self):
-        form_field = FormField(form_type="text")
-        self.assertFalse(form_field.has_property("wilma"))
-        form_field.add_property("wilma", "flintstone")
-        self.assertTrue(form_field.has_property("wilma"))
-        self.assertEquals("flintstone", form_field.get_property("wilma"))
-
-    def test_validations(self):
-        form_field = FormField(form_type="text")
-        self.assertFalse(form_field.has_validation("barney"))
-        form_field.add_validation("barney", "rubble")
-        self.assertTrue(form_field.has_validation("barney"))
-        self.assertEquals("rubble", form_field.get_validation("barney"))
-
-    def testIsEngineTask(self):
-        self.assertFalse(self.user_spec.is_engine_task())
-
-    def test_convert_to_dict(self):
-        form = Form()
-
         field1 = FormField(form_type="text")
         field1.id = "quest"
         field1.label = "What is your quest?"
@@ -89,21 +49,14 @@ class UserTaskSpecTest(unittest.TestCase):
         field2.add_property("description", "You know what to do.")
         field2.add_validation("maxlength", "25")

-        form.key = "formKey"
-        form.add_field(field1)
-        form.add_field(field2)
+        self.form.key = "formKey"
+        self.form.add_field(field1)
+        self.form.add_field(field2)

-        def JsonableHandler(Obj):
-            if hasattr(Obj, 'jsonable'):
-                return Obj.jsonable()
-            else:
-                raise 'Object of type %s with value of %s is not JSON serializable' % (
-                    type(Obj), repr(Obj))
-
-        json_form = json.dumps(form, default=JsonableHandler)
-        actual = json.loads(json_form)
-
-        expected = {
+        converter = UserTaskConverter()
+        dct = converter.to_dict(self.user_spec)
+        self.assertEqual(dct['name'], 'userTask')
+        self.assertEqual(dct['form'], {
             "fields": [
                 {
                     "default_value": "I seek the grail!",
@@ -137,12 +90,39 @@ class UserTaskSpecTest(unittest.TestCase):
                 }
             ],
             "key": "formKey",
-        }
+        })

-        expected_parsed = json.loads(json.dumps(expected))
+    def test_text_field(self):
+        form_field = FormField(form_type="text")
+        form_field.id = "1234"
+        self.form.add_field(form_field)
+        self.assertEqual(form_field, self.user_spec.form.fields[0])

-        self.maxDiff = None
-        self.assertDictEqual(actual, expected_parsed)
+    def test_enum_field(self):
+        enum_field = EnumFormField()
+        enum_field.label = "Which kind of fool are you"
+        enum_field.add_option('old fool', 'This is old, therefor it is good.')
+        enum_field.add_option('new fool',
+                              'This is new, therefor it is better.')
+        self.form.add_field(enum_field)
+        self.assertEqual(enum_field, self.user_spec.form.fields[-1])
+
+    def test_properties(self):
+        form_field = FormField(form_type="text")
+        self.assertFalse(form_field.has_property("wilma"))
+        form_field.add_property("wilma", "flintstone")
+        self.assertTrue(form_field.has_property("wilma"))
+        self.assertEqual("flintstone", form_field.get_property("wilma"))
+
+    def test_validations(self):
+        form_field = FormField(form_type="text")
+        self.assertFalse(form_field.has_validation("barney"))
+        form_field.add_validation("barney", "rubble")
+        self.assertTrue(form_field.has_validation("barney"))
+        self.assertEqual("rubble", form_field.get_validation("barney"))
+
+    def testIsEngineTask(self):
+        self.assertFalse(self.user_spec.is_engine_task())


 def suite():
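testSerialize above now goes through the camunda UserTaskConverter rather than the hand-rolled JsonableHandler. A minimal sketch of that converter in isolation; 'user_spec' is assumed to be a UserTask built with a populated Form, as in create_instance:

    from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter

    converter = UserTaskConverter()
    dct = converter.to_dict(user_spec)

    # The dictionary exposes the spec name and the form as plain data.
    assert dct['name'] == 'userTask'
    assert dct['form']['key'] == 'formKey'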
diff --git a/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py b/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py
index 52077059a..320586a9b 100644
--- a/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py
+++ b/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py
@@ -1,3 +1,4 @@
+from SpiffWorkflow.exceptions import SpiffWorkflowException
 from SpiffWorkflow.task import TaskState
 from .BaseTestCase import BaseTestCase
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
@@ -18,7 +19,7 @@ class PrescriptPostsciptTest(BaseTestCase):
         self.call_activity_test(True)

     def testDataObject(self):
-
+
         spec, subprocesses = self.load_workflow_spec('prescript_postscript_data_object.bpmn',
                                                      'Process_1')
         self.workflow = BpmnWorkflow(spec, subprocesses)
         # Set a on the workflow and b in the first task.
@@ -45,8 +46,21 @@ class PrescriptPostsciptTest(BaseTestCase):
         ready_tasks[0].complete()
         self.assertDictEqual({'a': 1, 'b': 2, 'c': 12, 'z': 6}, ready_tasks[0].data)

+    def test_for_error(self, save_restore=False):
+
+        spec, subprocesses = self.load_workflow_spec('prescript_postscript.bpmn', 'Process_1')
+        self.workflow = BpmnWorkflow(spec, subprocesses)
+        if save_restore:
+            self.save_restore()
+        ready_tasks = self.workflow.get_tasks(TaskState.READY)
+        # Calling do_engine_steps without setting the variables the
+        # pre-script expects will raise an exception.
+        with self.assertRaises(SpiffWorkflowException) as se:
+            self.workflow.do_engine_steps()
+        ex = se.exception
+        self.assertIn("Error occurred in the Pre-Script", str(ex))
+
     def call_activity_test(self, save_restore=False):
-
+
         spec, subprocesses = self.load_workflow_spec('prescript_postscript_*.bpmn', 'parent')
         self.workflow = BpmnWorkflow(spec, subprocesses)
         if save_restore:
diff --git a/tests/SpiffWorkflow/spiff/ServiceTaskTest.py b/tests/SpiffWorkflow/spiff/ServiceTaskTest.py
index 1d3035b3f..66b2d86a0 100644
--- a/tests/SpiffWorkflow/spiff/ServiceTaskTest.py
+++ b/tests/SpiffWorkflow/spiff/ServiceTaskTest.py
@@ -9,7 +9,6 @@ sys.path.insert(0, os.path.join(dirname, '..', '..', '..'))

 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
-from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
 from .BaseTestCase import BaseTestCase

 class ServiceTaskDelegate:
diff --git a/tests/SpiffWorkflow/spiff/ServiceTaskVariableTest.py b/tests/SpiffWorkflow/spiff/ServiceTaskVariableTest.py
index 12237ae6b..834f0c6f1 100644
--- a/tests/SpiffWorkflow/spiff/ServiceTaskVariableTest.py
+++ b/tests/SpiffWorkflow/spiff/ServiceTaskVariableTest.py
@@ -9,7 +9,6 @@ sys.path.insert(0, os.path.join(dirname, '..', '..', '..'))

 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
-from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
 from .BaseTestCase import BaseTestCase

 class ServiceTaskDelegate: