# -*- coding: utf-8 -*-

# Copyright (C) 2012 Matthew Hampton
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

import datetime
from copy import deepcopy

from SpiffWorkflow.task import TaskState

class EventDefinition(object):
    """
    This is the base class for Event Definitions. It implements the default
    throw/catch behavior for events.

    If internal is true, this event should be thrown to the current workflow.
    If external is true, this event should be thrown to the outer workflow.

    Default throw behavior is to send the event based on the values of the
    internal and external flags.
    Default catch behavior is to set the event to fired.
    """

    # Format to use for specifying dates for time based events
    TIME_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
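    # For example (our illustration, not part of the original source):
    #   datetime.datetime(2023, 1, 2, 3, 4, 5, 123456).strftime(TIME_FORMAT)
    # returns '2023-01-02 03:04:05.123456'.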

    def __init__(self):
        # Ideally I'd make these parameters, but I don't want them to be parameters
        # for any subclasses (as they are based on event type, not user choice) and
        # I don't want to write a separate deserializer for every type.
        self.internal, self.external = True, True

    @property
    def event_type(self):
        return f'{self.__class__.__module__}.{self.__class__.__name__}'

    def has_fired(self, my_task):
        return my_task._get_internal_data('event_fired', False)

    def catch(self, my_task, event_definition=None):
        my_task._set_internal_data(event_fired=True)

    def throw(self, my_task):
        self._throw(
            event=my_task.task_spec.event_definition,
            workflow=my_task.workflow,
            outer_workflow=my_task.workflow.outer_workflow
        )

    def reset(self, my_task):
        my_task._set_internal_data(event_fired=False)

    def _throw(self, event, workflow, outer_workflow, correlations=None):
        # This method exists because usually we just want to send the event defined
        # in our own task spec, but we can't do that for message events.
        # We also don't have a more sophisticated method for addressing events to
        # a particular process, but this at least provides a mechanism for
        # distinguishing between processes and subprocesses.
        if self.external:
            outer_workflow.catch(event, correlations)
        if self.internal and (self.external and workflow != outer_workflow):
            workflow.catch(event)

    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__
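
# A minimal sketch (ours, not part of the original file) of a custom definition
# built on the defaults above; subclasses typically just adjust the
# internal/external routing flags and report a distinguishing event_type:
#
#     class AuditEventDefinition(EventDefinition):
#         """Hypothetical event delivered only to the current workflow."""
#         def __init__(self):
#             super().__init__()
#             self.external = False
#
#         @property
#         def event_type(self):
#             return 'Audit'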


class NamedEventDefinition(EventDefinition):
    """
    Extend the base event class to provide a name for the event. Most throw/catch
    events have names that will be used to identify the event.

    :param name: the name of this event
    """

    def __init__(self, name):
        super(NamedEventDefinition, self).__init__()
        self.name = name

    def reset(self, my_task):
        super(NamedEventDefinition, self).reset(my_task)

    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.name == other.name


class CancelEventDefinition(EventDefinition):
    """
    Cancel events are only handled by the outer workflow, as they can only be used
    inside of transaction subprocesses.
    """

    def __init__(self):
        super(CancelEventDefinition, self).__init__()
        self.internal = False

    @property
    def event_type(self):
        return 'Cancel'


class ErrorEventDefinition(NamedEventDefinition):
    """
    Error events can occur only in subprocesses and as subprocess boundary events.
    They're matched by code rather than name.
    """

    def __init__(self, name, error_code=None):
        super(ErrorEventDefinition, self).__init__(name)
        self.error_code = error_code
        self.internal = False

    @property
    def event_type(self):
        return 'Error'

    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.error_code in [None, other.error_code]


class EscalationEventDefinition(NamedEventDefinition):
    """
    Escalation events have names, though they don't seem to be used for anything.
    Instead, the spec says that the escalation code should be matched.
    """

    def __init__(self, name, escalation_code=None):
        """
        Constructor.

        :param escalation_code: The escalation code this event should
        react to. If None then all escalations will activate this event.
        """
        super(EscalationEventDefinition, self).__init__(name)
        self.escalation_code = escalation_code

    @property
    def event_type(self):
        return 'Escalation'

    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.escalation_code in [None, other.escalation_code]


class CorrelationProperty:
    """Rules for generating a correlation key when a message is sent or received."""

    def __init__(self, name, expression, correlation_keys):
        self.name = name                          # This is the property name
        self.expression = expression              # This is how it's generated
        self.correlation_keys = correlation_keys  # These are the keys it's used by
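
    # For illustration only (names are ours, not from the source): a property
    # that copies an invoice id into the 'order' correlation key could be built as
    #     CorrelationProperty('invoice_id', 'invoice_id', ['order'])
    # where the expression 'invoice_id' is evaluated by the workflow's script
    # engine against the message payload.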


class MessageEventDefinition(NamedEventDefinition):
    """The default message event."""

    def __init__(self, name, correlation_properties=None):
        super().__init__(name)
        self.correlation_properties = correlation_properties or []
        self.payload = None
        self.internal = False

    @property
    def event_type(self):
        return 'Message'

    def catch(self, my_task, event_definition=None):
        self.update_internal_data(my_task, event_definition)
        super(MessageEventDefinition, self).catch(my_task, event_definition)

    def throw(self, my_task):
        # We can't update our own payload, because if this task is reached again
        # it has to be evaluated again, so we have to create a new event
        event = MessageEventDefinition(self.name, self.correlation_properties)
        # Generating a payload unfortunately needs to be handled using custom extensions.
        # However, there needs to be something to apply the correlations to in the
        # standard case, and this is in line with the way Spiff works otherwise
        event.payload = deepcopy(my_task.data)
        correlations = self.get_correlations(my_task.workflow.script_engine, event.payload)
        my_task.workflow.correlations.update(correlations)
        self._throw(event, my_task.workflow, my_task.workflow.outer_workflow, correlations)

    def update_internal_data(self, my_task, event_definition):
        my_task.internal_data[event_definition.name] = event_definition.payload

    def update_task_data(self, my_task):
        # I've added this method so that different message implementations can handle
        # copying their message data into the task
        payload = my_task.internal_data.get(self.name)
        if payload is not None:
            my_task.set_data(**payload)

    def get_correlations(self, script_engine, payload):
        correlations = {}
        for property in self.correlation_properties:
            for key in property.correlation_keys:
                if key not in correlations:
                    correlations[key] = {}
                correlations[key][property.name] = script_engine._evaluate(property.expression, payload)
        return correlations
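
    # For example (hypothetical values, ours): with a single
    # CorrelationProperty('invoice_id', 'invoice_id', ['order']) and a payload of
    # {'invoice_id': 42}, this returns {'order': {'invoice_id': 42}}.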


class NoneEventDefinition(EventDefinition):
    """
    This class defines behavior for NoneEvents. We override throw to do nothing.
    """

    def __init__(self):
        self.internal, self.external = False, False

    @property
    def event_type(self):
        return 'Default'

    def throw(self, my_task):
        """It's a 'none' event, so nothing to throw."""
        pass

    def reset(self, my_task):
        """It's a 'none' event, so nothing to reset."""
        pass


class SignalEventDefinition(NamedEventDefinition):
    """The SignalEventDefinition is the implementation of event definition used for Signal Events."""

    @property
    def event_type(self):
        return 'Signal'


class TerminateEventDefinition(EventDefinition):
    """The TerminateEventDefinition is the implementation of event definition used for Termination Events."""

    def __init__(self):
        super(TerminateEventDefinition, self).__init__()
        self.external = False

    @property
    def event_type(self):
        return 'Terminate'


class TimerEventDefinition(EventDefinition):
    """
    The TimerEventDefinition is the implementation of event definition used for
    catching Timer Events (timer events aren't thrown).
    """

    def __init__(self, label, dateTime):
        """
        Constructor.

        :param label: The label of the event. Used for the description.

        :param dateTime: The dateTime expression for the expiry time. This is
        passed to the Script Engine and must evaluate to a datetime (in the case of
        a time-date event) or a timedelta (in the case of a duration event).
        """
        super(TimerEventDefinition, self).__init__()
        self.label = label
        self.dateTime = dateTime
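
        # Illustrative expressions (ours, not from the source): a time-date timer
        # might use "datetime.datetime(2023, 1, 1, 9, 0)" and a duration timer
        # "datetime.timedelta(minutes=5)"; the script engine evaluates the string
        # and has_fired() below branches on the resulting type.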

    @property
    def event_type(self):
        return 'Timer'

    def has_fired(self, my_task):
        """
        The Timer is considered to have fired if the evaluated dateTime
        expression is before datetime.datetime.now()
        """
        if my_task.internal_data.get('event_fired'):
            # If we manually send this event, this will be set
            return True

        dt = my_task.workflow.script_engine.evaluate(my_task, self.dateTime)
        if isinstance(dt, datetime.timedelta):
            if my_task._get_internal_data('start_time', None) is not None:
                start_time = datetime.datetime.strptime(my_task._get_internal_data('start_time', None), self.TIME_FORMAT)
                elapsed = datetime.datetime.now() - start_time
                return elapsed > dt
            else:
                my_task.internal_data['start_time'] = datetime.datetime.now().strftime(self.TIME_FORMAT)
                return False

        if dt is None:
            return False
        if isinstance(dt, datetime.datetime):
            if dt.tzinfo:
                tz = dt.tzinfo
                now = tz.fromutc(datetime.datetime.utcnow().replace(tzinfo=tz))
            else:
                now = datetime.datetime.now()
        else:
            # assume type is a date, not datetime
            now = datetime.date.today()
        return now > dt

    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label


class CycleTimerEventDefinition(EventDefinition):
    """
    The CycleTimerEventDefinition is the implementation of event definition used
    for catching Cycle Timer Events (timer events aren't thrown).

    The cycle definition should evaluate to a tuple of
    (n repetitions, repetition duration)
    """

    def __init__(self, label, cycle_definition):
        super(CycleTimerEventDefinition, self).__init__()
        self.label = label
        # The way we're using cycle timers doesn't really align with how the BPMN
        # spec describes it (the example of "every monday at 9am").
        # I am not sure why this isn't a subprocess with a repeat count that starts
        # with a duration timer.
        self.cycle_definition = cycle_definition
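
        # Illustrative expression (ours, not from the source):
        # "(3, datetime.timedelta(hours=1))" evaluates to the expected
        # (n repetitions, repetition duration) tuple.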

    @property
    def event_type(self):
        return 'Cycle Timer'

    def has_fired(self, my_task):
        # We will fire this timer whenever a cycle completes.
        # The task itself will manage counting how many times it fires.
        if my_task.internal_data.get('event_fired'):
            # If we manually send this event, this will be set
            return True

        repeat, delta = my_task.workflow.script_engine.evaluate(my_task, self.cycle_definition)

        # This is the first time we've entered this event
        if my_task.internal_data.get('repeat') is None:
            my_task.internal_data['repeat'] = repeat
        if my_task.get_data('repeat_count') is None:
            # This is now a looping task, and if we use internal data, the repeat count won't persist
            my_task.set_data(repeat_count=0)

        now = datetime.datetime.now()
        if my_task._get_internal_data('start_time') is None:
            start_time = now
            my_task.internal_data['start_time'] = now.strftime(self.TIME_FORMAT)
        else:
            start_time = datetime.datetime.strptime(my_task._get_internal_data('start_time'), self.TIME_FORMAT)

        if my_task.get_data('repeat_count') >= repeat or (now - start_time) < delta:
            return False
        return True

    def reset(self, my_task):
        repeat_count = my_task.get_data('repeat_count')
        if repeat_count is None:
            # If this is a boundary event, then repeat count will not have been set
            my_task.set_data(repeat_count=0)
        else:
            my_task.set_data(repeat_count=repeat_count + 1)
        my_task.internal_data['start_time'] = None
        super(CycleTimerEventDefinition, self).reset(my_task)

    def __eq__(self, other):
        return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label


class MultipleEventDefinition(EventDefinition):

    def __init__(self, event_definitions=None, parallel=False):
        super().__init__()
        self.event_definitions = event_definitions or []
        self.parallel = parallel
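
        # Illustration (ours): parallel=False means "fire when any one of the
        # listed events has been seen"; parallel=True requires all of them, as
        # implemented in has_fired() below.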

    @property
    def event_type(self):
        return 'Multiple'

    def has_fired(self, my_task):
        seen_events = my_task.internal_data.get('seen_events', [])
        for event in self.event_definitions:
            if isinstance(event, (TimerEventDefinition, CycleTimerEventDefinition)):
                child = [c for c in my_task.children if c.task_spec.event_definition == event]
                child[0].task_spec._update_hook(child[0])
                child[0]._set_state(TaskState.MAYBE)
                if event.has_fired(my_task):
                    seen_events.append(event)

        if self.parallel:
            # Parallel multiple events need to match all events
            return all(event in seen_events for event in self.event_definitions)
        else:
            return len(seen_events) > 0

    def catch(self, my_task, event_definition=None):
        event_definition.catch(my_task, event_definition)
        seen_events = my_task.internal_data.get('seen_events', []) + [event_definition]
        my_task._set_internal_data(seen_events=seen_events)

    def reset(self, my_task):
        my_task.internal_data.pop('seen_events', None)
        super().reset(my_task)

    def __eq__(self, other):
        # This event can catch any of the events associated with it
        for event in self.event_definitions:
            if event == other:
                return True
        return False

    def throw(self, my_task):
        # Multiple events throw all associated events when they fire
        for event_definition in self.event_definitions:
            self._throw(
                event=event_definition,
                workflow=my_task.workflow,
                outer_workflow=my_task.workflow.outer_workflow
            )