
# -*- coding: utf-8 -*-
# Copyright (C) 2007 Samuel Abels
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from ..task import TaskState
from ..exceptions import WorkflowException
from ..operators import valueof
from ..specs.Join import Join
class ThreadMerge(Join):
    """
    This class represents a task for synchronizing branches that were
    previously split using a ThreadSplit.

    It has two or more incoming branches and one or more outputs.
    """

    def __init__(self,
                 wf_spec,
                 name,
                 split_task,
                 **kwargs):
        """
        Constructor.

        :type  wf_spec: :class:`SpiffWorkflow.specs.WorkflowSpec`
        :param wf_spec: A reference to the parent (usually a workflow).
        :type  name: string
        :param name: A name for the task.
        :type  split_task: str
        :param split_task: The name of the task spec that was previously
                           used to split the branch.
        :type  kwargs: dict
        :param kwargs: See :class:`SpiffWorkflow.specs.Join`.
        """
        assert split_task is not None
        Join.__init__(self, wf_spec, name, split_task, **kwargs)

    def _start(self, my_task):
        """
        Decide whether this merge is ready to fire for the given task.

        Returns True when the join threshold has been reached (cancelling
        any still-waiting branches if this is a cancelling join), False
        when the merge must keep waiting.

        :raises WorkflowException: if the associated split task was never
            reached on this branch.
        """
        # If the threshold was already reached, there is nothing else to do.
        if my_task._has_state(TaskState.COMPLETED):
            return False
        if my_task._has_state(TaskState.READY):
            return True

        # Retrieve a list of all activated tasks from the associated
        # task that did the conditional parallel split.
        split_task = my_task._find_ancestor_from_name(self.split_task)
        if split_task is None:
            msg = 'Join with %s, which was not reached' % self.split_task
            raise WorkflowException(msg, task_spec=self)
        tasks = split_task.task_spec._get_activated_threads(split_task)

        # The default threshold is the number of threads that were started.
        threshold = valueof(my_task, self.threshold)
        if threshold is None:
            threshold = len(tasks)

        # Look up which tasks have already completed.
        waiting_tasks = []
        completed = 0
        for task in tasks:
            # Refresh path prediction.
            task.task_spec._predict(task)
            if self._branch_is_complete(task):
                completed += 1
            else:
                waiting_tasks.append(task)

        # If the threshold was reached, get ready to fire.
        if completed >= threshold:
            # If this is a cancelling join, cancel all incoming branches,
            # except for the one that just completed.
            if self.cancel_remaining:
                for task in waiting_tasks:
                    task.cancel()
            return True

        # We do NOT set the task state to COMPLETED, because in
        # case all other incoming tasks get cancelled (or never reach
        # the ThreadMerge for other reasons, such as reaching a stub branch),
        # we need to revisit it.
        return False

    def _update_hook(self, my_task):
        """
        Run when the task is updated: either park the task as WAITING, or
        complete the merge by marking all sibling instances of this spec
        COMPLETED except the most recently changed one, which becomes READY.
        """
        my_task._inherit_data()
        if not self._start(my_task):
            my_task._set_state(TaskState.WAITING)
            return

        split_task_spec = my_task.workflow.get_task_spec_from_name(
            self.split_task)
        split_task = my_task._find_ancestor(split_task_spec)

        # Find the inbound task that was completed last.
        last_changed = None
        tasks = []
        for task in split_task._find_any(self):
            # Skip instances that live below this merge task itself.
            if self.split_task and task._is_descendant_of(my_task):
                continue
            changed = task.parent.last_state_change
            if last_changed is None \
                    or changed > last_changed.parent.last_state_change:
                last_changed = task
            tasks.append(task)

        # Mark all tasks in this thread that reference this task as
        # completed, except for the first one, which should be READY.
        for task in tasks:
            if task == last_changed:
                self.entered_event.emit(my_task.workflow, my_task)
                task._ready()
            else:
                task._set_state(TaskState.COMPLETED)
                task._drop_children()

    def serialize(self, serializer):
        """Delegate serialization of this spec to the given serializer."""
        return serializer.serialize_thread_merge(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        """
        Recreate a ThreadMerge spec from its serialized state via the
        given serializer.
        """
        return serializer.deserialize_thread_merge(wf_spec, s_state)