spiff-arena/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py

# -*- coding: utf-8 -*-
# Copyright (C) 2012 Matthew Hampton
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from copy import deepcopy
import logging

from SpiffWorkflow.bpmn.exceptions import WorkflowDataException

from ...task import TaskState
from .UnstructuredJoin import UnstructuredJoin
from ...specs.Simple import Simple
from ...specs.WorkflowSpec import WorkflowSpec

data_log = logging.getLogger('spiff.data')


class _EndJoin(UnstructuredJoin):

    def _check_threshold_unstructured(self, my_task, force=False):
        # Look at the tree to find all ready and waiting tasks (excluding
        # ourself). The EndJoin waits for everyone!
        waiting_tasks = []
        for task in my_task.workflow.get_tasks(TaskState.READY | TaskState.WAITING):
            if task.thread_id != my_task.thread_id:
                continue
            if task.task_spec == my_task.task_spec:
                continue

            is_mine = False
            w = task.workflow
            if w == my_task.workflow:
                is_mine = True
            while w and w.outer_workflow != w:
                w = w.outer_workflow
                if w == my_task.workflow:
                    is_mine = True
            if is_mine:
                waiting_tasks.append(task)

        return force or len(waiting_tasks) == 0, waiting_tasks

    def _on_complete_hook(self, my_task):
        super(_EndJoin, self)._on_complete_hook(my_task)
        my_task.workflow.data.update(my_task.data)
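
# Note (added; not part of the original source): _check_threshold_unstructured above
# counts a peer task as still outstanding only if it is READY or WAITING, shares this
# task's thread, is not another instance of this EndJoin, and is owned by this workflow
# either directly or through its chain of outer workflows. The join can therefore
# complete only when that list is empty (or when force=True), at which point
# _on_complete_hook merges the completing task's data into the workflow data.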


class BpmnDataSpecification:

    def __init__(self, name, description=None):
        """
        :param name: the name of the task (the BPMN ID)
        :param description: the task description (the BPMN name)
        """
        self.name = name
        self.description = description or name
        # In the future, we can add schemas defining the objects here.

    def get(self, my_task):
        """Copy a value from the workflow data to the task data."""
        if self.name not in my_task.workflow.data:
            message = f"Workflow variable {self.name} not found"
            raise WorkflowDataException(my_task, data_input=self, message=message)
        my_task.data[self.name] = deepcopy(my_task.workflow.data[self.name])

    def set(self, my_task):
        """Copy a value from the task data to the workflow data."""
        if self.name not in my_task.data:
            message = f"Task variable {self.name} not found"
            raise WorkflowDataException(my_task, data_output=self, message=message)
        my_task.workflow.data[self.name] = deepcopy(my_task.data[self.name])
        del my_task.data[self.name]
        data_log.info(f'Set workflow variable {self.name}', extra=my_task.log_info())

    def copy(self, source, destination, data_input=False, data_output=False):
        """Copy a value from one task to another."""
        if self.name not in source.data:
            message = f"Unable to copy {self.name}"
            raise WorkflowDataException(
                source,
                data_input=self if data_input else None,
                data_output=self if data_output else None,
                message=message
            )
        destination.data[self.name] = deepcopy(source.data[self.name])
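
# A minimal usage sketch (added; not part of the original source). 'workflow' and
# 'task' stand for hypothetical runtime objects exposing the 'data' dicts and
# 'log_info()' used above:
#
#     num_cameras = BpmnDataSpecification('num_cameras')
#     workflow.data['num_cameras'] = 3
#     num_cameras.get(task)   # copies workflow.data['num_cameras'] into task.data,
#                             # raising WorkflowDataException if it is missing
#     task.data['num_cameras'] = 5
#     num_cameras.set(task)   # copies the value back into workflow.data, removes it
#                             # from task.data, and logs to the 'spiff.data' logger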


class BpmnProcessSpec(WorkflowSpec):
    """
    This class represents the specification of a BPMN process workflow. This
    specialises the standard Spiff WorkflowSpec class with a few extra methods
    and attributes.
    """

    def __init__(self, name=None, description=None, filename=None, svg=None):
        """
        Constructor.

        :param svg: This provides the SVG representation of the workflow as an
        LXML node. (optional)
        """
        super(BpmnProcessSpec, self).__init__(name=name, filename=filename)
        self.end = _EndJoin(self, '%s.EndJoin' % (self.name))
        end = Simple(self, 'End')
        end.follow(self.end)
        self.svg = svg
        self.description = description
        self.data_inputs = []
        self.data_outputs = []
        self.data_objects = {}
        self.correlation_keys = {}

    def get_all_lanes(self):
        """
        Returns a set of the distinct lane names used in the process (including
        called activities).
        """
        done = set()
        lanes = set()

        def recursive_find(task_spec):
            if task_spec in done:
                return
            done.add(task_spec)

            if hasattr(task_spec, 'lane') and task_spec.lane:
                lanes.add(task_spec.lane)

            if hasattr(task_spec, 'spec'):
                recursive_find(task_spec.spec.start)

            for t in task_spec.outputs:
                recursive_find(t)

        recursive_find(self.start)

        return lanes

    def get_specs_depth_first(self):
        """
        Get the specs for all processes (including called ones), in depth first
        order.
        """
        done = set()
        specs = [self]

        def recursive_find(task_spec):
            if task_spec in done:
                return
            done.add(task_spec)

            if hasattr(task_spec, 'spec'):
                specs.append(task_spec.spec)
                recursive_find(task_spec.spec.start)

            for t in task_spec.outputs:
                recursive_find(t)

        recursive_find(self.start)

        return specs
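

# A minimal, illustrative sketch (added; not part of the original module). Real
# process specs are normally assembled by the BPMN parser, which attaches task
# specs between spec.start and spec.end; with nothing attached yet, the two
# traversal helpers should report no lanes and only this spec itself.
if __name__ == '__main__':
    example_spec = BpmnProcessSpec(name='example', description='Example process')
    print(example_spec.get_all_lanes())          # expected: set()
    print(example_spec.get_specs_depth_first())  # expected: a one-element list holding example_spec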