# Copyright (C) 2007 Samuel Abels, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

import logging

from .specs.Simple import Simple
from .task import Task, TaskState
from .util.compat import mutex
from .util.event import Event
from .exceptions import TaskNotFoundException, WorkflowException

logger = logging.getLogger('spiff')


class Workflow(object):

    """
    The engine that executes a workflow.

    It is essentially a facility for managing all branches.
    A Workflow is also the place that holds the data of a running workflow.
    """

    def __init__(self, workflow_spec, deserializing=False, **kwargs):
        """
        Constructor.

        :type workflow_spec: specs.WorkflowSpec
        :param workflow_spec: The workflow specification.
        :type deserializing: bool
        :param deserializing: set to true when deserializing to avoid
            generating tasks twice (and associated problems with multiple
            hierarchies of tasks)
        """
        self.name = None
        assert workflow_spec is not None
        self.spec = workflow_spec
        self.data = {}
        self.outer_workflow = kwargs.get('parent', self)
        self.locks = {}
        self.last_task = None
        if 'Root' in workflow_spec.task_specs:
            root = workflow_spec.task_specs['Root']
        else:
            root = Simple(workflow_spec, 'Root')

        # Setting TaskState.COMPLETED prevents the root task from being executed.
        self.task_tree = Task(self, root, state=TaskState.COMPLETED)
        start = self.task_tree._add_child(self.spec.start, state=TaskState.FUTURE)
        self.success = True
        self.debug = False

        # Events.
        self.completed_event = Event()

        if not deserializing:
            self._predict()
            if 'parent' not in kwargs:
                start.task_spec._update(start)
            logger.info('Initialize', extra=self.log_info())

        self.task_mapping = self._get_task_mapping()

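    # Illustrative usage sketch (kept as a comment; not executed as part of this
    # module): building a trivial spec and wrapping it in a Workflow. The spec
    # construction below is an assumption for demonstration only; exact import
    # paths and signatures may vary between SpiffWorkflow versions.
    #
    #     from SpiffWorkflow import Workflow
    #     from SpiffWorkflow.specs import WorkflowSpec
    #     from SpiffWorkflow.specs.Simple import Simple
    #
    #     spec = WorkflowSpec('example')
    #     task = Simple(spec, 'task_1')   # a single no-op task spec
    #     spec.start.connect(task)        # wire it to the implicit Start task
    #     workflow = Workflow(spec)       # predicts tasks and logs 'Initialize'
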
    def log_info(self, dct=None):
        extra = dct or {}
        extra.update({
            'workflow_spec': self.spec.name,
            'workflow_name': self.spec.description,
            'task_spec': '-',
            'task_type': None,
            'task_id': None,
            'data': None,
        })
        return extra

    def is_completed(self):
        """
        Returns True if the entire Workflow is completed, False otherwise.

        :rtype: bool
        :return: Whether the workflow is completed.
        """
        mask = TaskState.NOT_FINISHED_MASK
        iterator = Task.Iterator(self.task_tree, mask)
        try:
            next(iterator)
        except StopIteration:
            # No unfinished tasks found.
            return True
        return False

    def _predict(self, mask=TaskState.NOT_FINISHED_MASK):
        for task in Workflow.get_tasks(self, TaskState.NOT_FINISHED_MASK):
            task.task_spec._predict(task, mask=mask)

    def _get_waiting_tasks(self):
        waiting = Task.Iterator(self.task_tree, TaskState.WAITING)
        return [w for w in waiting]

    def _task_completed_notify(self, task):
        if task.get_name() == 'End':
            self.data.update(task.data)
        # Update the state of every WAITING task.
        for thetask in self._get_waiting_tasks():
            thetask.task_spec._update(thetask)
        if self.completed_event.n_subscribers() == 0:
            # Since is_completed() is expensive it makes sense to bail
            # out if calling it is not necessary.
            return
        if self.is_completed():
            self.completed_event(self)

    def _get_mutex(self, name):
        if name not in self.locks:
            self.locks[name] = mutex()
        return self.locks[name]

    def _get_task_mapping(self):
        # Build a nested mapping of thread_id -> task_spec -> set of tasks.
        task_mapping = {}
        for task in self.task_tree:
            thread_task_mapping = task_mapping.get(task.thread_id, {})
            tasks = thread_task_mapping.get(task.task_spec, set())
            tasks.add(task)
            thread_task_mapping[task.task_spec] = tasks
            task_mapping[task.thread_id] = thread_task_mapping
        return task_mapping

    def update_task_mapping(self):
        """
        Update the workflow's task_mapping. Make sure this method is called
        every time a task instance is reconstructed.
        """
        self.task_mapping = self._get_task_mapping()

    def set_data(self, **kwargs):
        """
        Defines the given attribute/value pairs.
        """
        self.data.update(kwargs)

    def get_data(self, name, default=None):
        """
        Returns the value of the data field with the given name, or the given
        default value if the data field does not exist.

        :type name: str
        :param name: A data field name.
        :type default: obj
        :param default: Return this value if the data field does not exist.
        :rtype: obj
        :returns: The value of the data field.
        """
        return self.data.get(name, default)

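    # Illustrative sketch of the workflow-level data API (assumes a `workflow`
    # instance already exists; not executed as part of this module):
    #
    #     workflow.set_data(customer_id=42, region='EU')
    #     workflow.get_data('customer_id')          # -> 42
    #     workflow.get_data('missing', default=0)   # -> 0
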
    def cancel(self, success=False):
        """
        Cancels all open tasks in the workflow.

        :type success: bool
        :param success: Whether the Workflow should be marked as successfully completed.
        """
        self.success = success
        cancel = []
        for task in Task.Iterator(self.task_tree, TaskState.NOT_FINISHED_MASK):
            cancel.append(task)
        for task in cancel:
            task.cancel()
        logger.info(f'Cancel with {len(cancel)} remaining', extra=self.log_info())
        return cancel

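    # Illustrative sketch (assumes a running `workflow`; not executed here):
    # abandon all unfinished work and inspect what was dropped.
    #
    #     dropped = workflow.cancel()   # marks the run as not successful
    #     print([t.task_spec.name for t in dropped])
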
    def get_task_spec_from_name(self, name):
        """
        Returns the task spec with the given name.

        :type name: str
        :param name: The name of the task spec.
        :rtype: TaskSpec
        :returns: The task spec with the given name.
        """
        return self.spec.get_task_spec_from_name(name)

    def get_tasks_from_spec_name(self, name):
        """
        Returns all tasks whose spec has the given name.

        :type name: str
        :param name: The name of a task spec.
        :rtype: list[Task]
        :returns: A list of tasks that relate to the spec with the given name.
        """
        return [task for task in self.get_tasks_iterator() if task.task_spec.name == name]

    def get_tasks(self, state=TaskState.ANY_MASK):
        """
        Returns a list of Task objects with the given state.

        :type state: integer
        :param state: A bitmask of states.
        :rtype: list[Task]
        :returns: A list of tasks.
        """
        return [t for t in Task.Iterator(self.task_tree, state)]

    def get_tasks_iterator(self, state=TaskState.ANY_MASK):
        """
        Returns an iterator of Task objects with the given state.

        :type state: integer
        :param state: A bitmask of states.
        :rtype: Task.Iterator
        :returns: An iterator over the matching tasks.
        """
        return Task.Iterator(self.task_tree, state)

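    # Illustrative sketch (assumes a running `workflow`; not executed here):
    # list the names of all READY tasks, then iterate lazily over everything
    # that is not yet finished.
    #
    #     from SpiffWorkflow.task import TaskState
    #
    #     ready_names = [t.task_spec.name for t in workflow.get_tasks(TaskState.READY)]
    #     for task in workflow.get_tasks_iterator(TaskState.NOT_FINISHED_MASK):
    #         print(task.task_spec.name, task.state)
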
    def get_task_from_id(self, task_id, tasklist=None):
        """
        Returns the task with the given id.

        :type task_id: integer
        :param task_id: The id of a task.
        :param tasklist: Optional cache of get_tasks for operations
                         where we are calling multiple times, as when we
                         are deserializing the workflow
        :rtype: Task
        :returns: The task with the given id.
        """
        if task_id is None:
            raise WorkflowException('task_id is None', task_spec=self.spec)
        tasklist = tasklist or self.task_tree
        for task in tasklist:
            if task.id == task_id:
                return task
        msg = 'A task with the given task_id (%s) was not found' % task_id
        raise TaskNotFoundException(msg, task_spec=self.spec)

    def run_task_from_id(self, task_id):
        """
        Runs the task with the given id.

        :type task_id: integer
        :param task_id: The id of the Task object.
        """
        task = self.get_task_from_id(task_id)
        return task.run()

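    # Illustrative sketch (assumes a running `workflow`; not executed here):
    # drive the workflow manually by running READY tasks one at a time.
    #
    #     from SpiffWorkflow.task import TaskState
    #
    #     for task in workflow.get_tasks(TaskState.READY):
    #         workflow.run_task_from_id(task.id)
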
    def reset_from_task_id(self, task_id, data=None):
        """
        Resets the workflow from the task with the given id.

        :type task_id: integer
        :param task_id: The id of the Task object.
        :param data: optionally set the task data
        """
        task = self.get_task_from_id(task_id)
        return task.reset_token(data)

    def run_next(self, pick_up=True, halt_on_manual=True):
        """
        Runs the next task.
        Returns True if completed, False otherwise.

        :type pick_up: bool
        :param pick_up: When True, this method attempts to choose the next
                        task not by searching beginning at the root, but by
                        searching from the position at which the last call
                        of run_next() left off.
        :type halt_on_manual: bool
        :param halt_on_manual: When True, this method will not attempt to
                               complete any tasks that have manual=True.
                               See :meth:`SpiffWorkflow.specs.TaskSpec.__init__`
        :rtype: bool
        :returns: True if a task was run, False otherwise.
        """
        # Try to pick up where we left off.
        blacklist = []
        if pick_up and self.last_task is not None:
            try:
                iterator = Task.Iterator(self.last_task, TaskState.READY)
                task = next(iterator)
            except StopIteration:
                task = None
            self.last_task = None
            if task is not None:
                if not (halt_on_manual and task.task_spec.manual):
                    if task.run():
                        self.last_task = task
                        return True
                blacklist.append(task)

        # Walk through all ready tasks.
        for task in Task.Iterator(self.task_tree, TaskState.READY):
            # Skip descendants of tasks we already tried and blacklisted.
            if any(task._is_descendant_of(blacklisted) for blacklisted in blacklist):
                continue
            if not (halt_on_manual and task.task_spec.manual):
                if task.run():
                    self.last_task = task
                    return True
            blacklist.append(task)

        # Walk through all waiting tasks.
        for task in Task.Iterator(self.task_tree, TaskState.WAITING):
            task.task_spec._update(task)
            if not task._has_state(TaskState.WAITING):
                self.last_task = task
                return True
        return False

    def run_all(self, pick_up=True, halt_on_manual=True):
        """
        Runs all branches until completion. This is a convenience wrapper
        around :meth:`run_next`, and the pick_up argument is passed along.

        :type pick_up: bool
        :param pick_up: Passed on to each call of run_next().
        :type halt_on_manual: bool
        :param halt_on_manual: When True, this method will not attempt to
                               complete any tasks that have manual=True.
                               See :meth:`SpiffWorkflow.specs.TaskSpec.__init__`
        """
        while self.run_next(pick_up, halt_on_manual):
            pass

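    # Illustrative sketch (assumes `workflow` was built from a spec with no
    # manual tasks; not executed here): run every branch to completion, then
    # read the data that was carried to the End task.
    #
    #     workflow.run_all()
    #     assert workflow.is_completed()
    #     print(workflow.data)
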
    def get_dump(self):
        """
        Returns a complete dump of the current internal task tree for
        debugging.

        :rtype: str
        :returns: The debug information.
        """
        return self.task_tree.get_dump()

    def dump(self):
        """
        Like :meth:`get_dump`, but prints the output to the terminal instead
        of returning it.
        """
        print(self.task_tree.dump())

    def serialize(self, serializer, **kwargs):
        """
        Serializes a Workflow instance using the provided serializer.

        :type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
        :param serializer: The serializer to use.
        :type kwargs: dict
        :param kwargs: Passed to the serializer.
        :rtype: object
        :returns: The serialized workflow.
        """
        return serializer.serialize_workflow(self, **kwargs)

    @classmethod
    def deserialize(cls, serializer, s_state, **kwargs):
        """
        Deserializes a Workflow instance using the provided serializer.

        :type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
        :param serializer: The serializer to use.
        :type s_state: object
        :param s_state: The serialized workflow.
        :type kwargs: dict
        :param kwargs: Passed to the serializer.
        :rtype: Workflow
        :returns: The workflow instance.
        """
        return serializer.deserialize_workflow(s_state, **kwargs)
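
# Illustrative sketch of a serialize/deserialize round trip (not executed as
# part of this module). The JSONSerializer import path below is an assumption
# and may differ between SpiffWorkflow versions.
#
#     from SpiffWorkflow.serializer.json import JSONSerializer
#
#     serializer = JSONSerializer()
#     s_state = workflow.serialize(serializer)
#     restored = Workflow.deserialize(serializer, s_state)
#     assert restored.spec.name == workflow.spec.name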