From 05e226fcabf9bbe6d9ca9816cd2df827926b619a Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 14 Feb 2023 16:51:09 -0500 Subject: [PATCH] Squashed 'SpiffWorkflow/' changes from 0e61be85c..11e4b4f96 11e4b4f96 fix two incorrectly names attributes in node parser abec918a8 Merge pull request #291 from rachfop/fix-grammar a597f9ce9 Fixes grammar, typos, and spellings 00ffaf067 Assure that when something goes wrong calling a service task that we get as much good information about the problem as possible. c044b5646 Fix that dreadful unknown "KeyError" exception that was cropping up. Adding a bit of detail to the spiffworkflow exceptions when a duplicate process model is found. Disable the submit button on tasks after you click submit (avoid the double click and give users a better experience) git-subtree-dir: SpiffWorkflow git-subtree-split: 11e4b4f96f03a036bd29632f1560e347a4e69aae --- SpiffWorkflow/bpmn/parser/BpmnParser.py | 6 +- SpiffWorkflow/bpmn/parser/node_parser.py | 4 +- SpiffWorkflow/spiff/specs/service_task.py | 13 +- doc/bpmn/advanced.rst | 18 +-- doc/bpmn/events.rst | 6 +- doc/bpmn/gateways.rst | 2 +- doc/bpmn/index.rst | 8 +- doc/bpmn/multiinstance.rst | 2 +- doc/bpmn/organization.rst | 26 ++-- doc/bpmn/spiff-extensions.rst | 10 +- doc/bpmn/synthesis.rst | 32 ++--- doc/bpmn/tasks.rst | 3 +- doc/intro.rst | 5 +- doc/non-bpmn/custom-tasks/index.rst | 2 +- doc/non-bpmn/index.rst | 4 +- doc/non-bpmn/patterns.rst | 154 +++++++++++----------- 16 files changed, 149 insertions(+), 146 deletions(-) diff --git a/SpiffWorkflow/bpmn/parser/BpmnParser.py b/SpiffWorkflow/bpmn/parser/BpmnParser.py index 6b98bb8a..7741c801 100644 --- a/SpiffWorkflow/bpmn/parser/BpmnParser.py +++ b/SpiffWorkflow/bpmn/parser/BpmnParser.py @@ -48,7 +48,7 @@ from .task_parsers import ( GatewayParser, ConditionalGatewayParser, CallActivityParser, - ScriptTaskParser, + ScriptTaskParser, SubWorkflowParser, ) from .event_parsers import ( @@ -254,9 +254,9 @@ class BpmnParser(object): def create_parser(self, node, filename=None, lane=None): parser = self.PROCESS_PARSER_CLASS(self, node, self.namespaces, filename=filename, lane=lane) if parser.get_id() in self.process_parsers: - raise ValidationException('Duplicate process ID', node=node, file_name=filename) + raise ValidationException(f'Duplicate process ID: {parser.get_id()}', node=node, file_name=filename) if parser.get_name() in self.process_parsers_by_name: - raise ValidationException('Duplicate process name', node=node, file_name=filename) + raise ValidationException(f'Duplicate process name: {parser.get_name()}', node=node, file_name=filename) self.process_parsers[parser.get_id()] = parser self.process_parsers_by_name[parser.get_name()] = parser diff --git a/SpiffWorkflow/bpmn/parser/node_parser.py b/SpiffWorkflow/bpmn/parser/node_parser.py index 05552035..0b1c8ed4 100644 --- a/SpiffWorkflow/bpmn/parser/node_parser.py +++ b/SpiffWorkflow/bpmn/parser/node_parser.py @@ -46,7 +46,7 @@ class NodeParser: if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects: specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')]) else: - raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.file_name) + raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.filename) return specs def parse_outgoing_data_references(self): @@ -56,7 +56,7 @@ class NodeParser: if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects: 
specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')]) else: - raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.file_name) + raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.filename) return specs def parse_extensions(self, node=None): diff --git a/SpiffWorkflow/spiff/specs/service_task.py b/SpiffWorkflow/spiff/specs/service_task.py index 80e3f345..c31e2fb5 100644 --- a/SpiffWorkflow/spiff/specs/service_task.py +++ b/SpiffWorkflow/spiff/specs/service_task.py @@ -1,6 +1,7 @@ from copy import deepcopy import json from SpiffWorkflow.bpmn.specs.ServiceTask import ServiceTask +from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.spiff.specs.spiff_task import SpiffBpmnTask class ServiceTask(SpiffBpmnTask, ServiceTask): @@ -31,9 +32,13 @@ class ServiceTask(SpiffBpmnTask, ServiceTask): operation_params_copy = deepcopy(self.operation_params) evaluated_params = {k: evaluate(v) for k, v in operation_params_copy.items()} - result = task.workflow.script_engine.call_service(self.operation_name, - evaluated_params, task.data) - + try: + result = task.workflow.script_engine.call_service(self.operation_name, + evaluated_params, task.data) + except Exception as e: + wte = WorkflowTaskException("Error executing Service Task", + task=task, exception=e) + wte.add_note(str(e)) + raise wte parsed_result = json.loads(result) - task.data[self._result_variable(task)] = parsed_result diff --git a/doc/bpmn/advanced.rst b/doc/bpmn/advanced.rst index d6442b87..60848ef5 100644 --- a/doc/bpmn/advanced.rst +++ b/doc/bpmn/advanced.rst @@ -7,7 +7,7 @@ Filtering Tasks In our earlier example, all we did was check the lane a task was in and display it along with the task name and state. -Lets take a look at a sample workflow with lanes: +Let's take a look at a sample workflow with lanes: .. figure:: figures/lanes.png :scale: 30% @@ -15,7 +15,7 @@ Lets take a look at a sample workflow with lanes: Workflow with lanes -To get all of the tasks that are ready for the 'Customer' workflow, we could +To get all the tasks that are ready for the 'Customer' workflow, we could specify the lane when retrieving ready user tasks: .. code:: python @@ -50,14 +50,14 @@ Logging Spiff provides several loggers: - the :code:`spiff` logger, which emits messages when a workflow is initialized and when tasks change state - the :code:`spiff.metrics` logger, which emits messages containing the elapsed duration of tasks - - the :code:`spiff.data` logger, which emits message when task or workflow data is updated. + - the :code:`spiff.data` logger, which emits a message when task or workflow data is updated. Log level :code:`INFO` will provide reasonably detailed information about state changes. As usual, log level :code:`DEBUG` will probably provide more logs than you really want to see, but the logs will contain the task and task internal data. -Data can be included at any level less than :code:`INFO`. In our exmple application, +Data can be included at any level less than :code:`INFO`. In our example application, we define a custom log level .. code:: python @@ -76,7 +76,7 @@ Serialization Serialization Changed in Version 1.1.7. Support for pre-1.1.7 serialization will be dropped in a future release. - The old serialization method still works but it is deprecated. + The old serialization method still works, but it is deprecated. To migrate your system to the new version, see "Migrating between serialization versions" below. 
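For reference, a full round trip through the 1.1.7+ serializer might look like the sketch below. This is an illustration rather than code from the example application; it assumes a :code:`serializer` configured as described in this section and a running :code:`workflow` instance.

.. code:: python

    # Save: serialize_json returns a JSON string (with the serializer's
    # version number embedded, as discussed later in this section).
    state = serializer.serialize_json(workflow)
    with open('workflow.json', 'w') as dump:
        dump.write(state)

    # Restore: rebuild the workflow from the saved state.
    with open('workflow.json') as dump:
        workflow = serializer.deserialize_json(dump.read())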
@@ -131,7 +131,7 @@ To restore the workflow: with open(args.restore) as state: wf = serializer.deserialize_json(state.read()) -The workflow serializer is designed to be flexible and modular and as such is a little complicated. It has +The workflow serializer is designed to be flexible and modular, and as such is a little complicated. It has two components: - a workflow spec converter (which handles workflow and task specs) @@ -141,7 +141,7 @@ The default workflow spec converter likely to meet your needs, either on its own :code:`UserTask` and :code:`BusinessRuleTask` in the :code:`camnuda` or :code:`spiff` and :code:`dmn` subpackages of this library, and all you'll need to do is add them to the list of task converters, as we did above. -However, he default data converter is very simple, adding only JSON-serializable conversions of :code:`datetime` +However, the default data converter is very simple, adding only JSON-serializable conversions of :code:`datetime` and :code:`timedelta` objects (we make these available in our default script engine) and UUIDs. If your workflow or task data contains objects that are not JSON-serializable, you'll need to extend ours, or extend its base class to create one of your own. @@ -245,7 +245,7 @@ The code would then look more like this: Because the serializer is highly customizable, we've made it possible for you to manage your own versions of the serialization. You can do this by passing a version number into the serializer, which will be embedded in the -json of all workflows. This allow you to modify the serialization and customize it over time, and still manage +json of all workflows. This allows you to modify the serialization and customize it over time, and still manage the different forms as you make adjustments without leaving people behind. Versioned Serializer @@ -273,7 +273,7 @@ security reasons. and :code:`exec`! If you have security concerns, you should definitely investigate replacing the default with your own implementation. -We'll cover a simple extension of custom script engine here. There is also an examples of +We'll cover a simple extension of custom script engine here. There is also an example of a similar engine based on `RestrictedPython `_ included alongside this example. diff --git a/doc/bpmn/events.rst b/doc/bpmn/events.rst index 121528e2..a2b32f4c 100644 --- a/doc/bpmn/events.rst +++ b/doc/bpmn/events.rst @@ -31,7 +31,7 @@ We'll include examples of all of these types in this section. Transactions ^^^^^^^^^^^^ -We also need to introduce the concept of a Transaction, bceause certain events +We also need to introduce the concept of a Transaction because certain events can only be used in that context. A Transaction is essentially a subprocess, but it must fully complete before it affects its outer workflow. @@ -147,7 +147,7 @@ this tutorial. We ask the Employee to verify that they were able to retrieve the product; if they were unable to do so, then we generate an Error End Event, which we will handle -with an Interrupting Error Boundary Event (Error events are *always* Interrupting). +with an Interrupting Error Boundary Event (Error events are *always* interrupting). If the product is unavailable, our Manager will notify the customer, issue a refund, and cancel the order. @@ -161,7 +161,7 @@ Event, you'll have to use Escalation, because BPMN does not allow Intermediate E and that Error Events cannot be Non-Interrupting. 
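A note on how these events surface in code: a runner has to give waiting events a chance to fire. The loop below is an illustrative sketch (not taken from the example application) of polling a :code:`workflow` that is blocked on timer or boundary events.

.. code:: python

    import time

    # Re-check waiting tasks so timer and boundary events can fire, then
    # let any newly-ready engine tasks run. A real runner would also
    # present READY user tasks inside this loop.
    while not workflow.is_completed():
        workflow.refresh_waiting_tasks()
        workflow.do_engine_steps()
        time.sleep(1)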
 In our example, we'll assume that if we failed to ship the product, we can try again later,
-so we will not end the Subprocess (Escalation events can be either Interrupting or
+so we will not end the Subprocess (Escalation events can be either Interrupting or
 Non-Interrupting).
 
 However, we still want to notify our customer of a delay, so we use a Non-Interrupting
diff --git a/doc/bpmn/gateways.rst b/doc/bpmn/gateways.rst
index 7b926533..f715ae1b 100644
--- a/doc/bpmn/gateways.rst
+++ b/doc/bpmn/gateways.rst
@@ -23,7 +23,7 @@ Exclusive Gateway
 
 Exclusive gateways are used when exactly one alternative can be selected.
 
 Suppose our products are T-shirts and we offer product C in several colors. After
-the user selects a product, we check to see it if is customizable. Our default
+the user selects a product, we check to see if it is customizable. Our default
 branch will be 'Not Customizable', but we'll direct the user to a second form if
 they select 'C'; our condition for choosing this branch is a simple python
 expression.
diff --git a/doc/bpmn/index.rst b/doc/bpmn/index.rst
index 8cd5f7a0..8364834e 100644
--- a/doc/bpmn/index.rst
+++ b/doc/bpmn/index.rst
@@ -1,7 +1,7 @@
 BPMN Workflows
 ==============
 
-The basic idea of SpiffWorkflow is that you can use it to write an interpreter
+The basic idea of SpiffWorkflow is that you can use it to write an interpreter
 in Python that creates business applications from BPMN models. In this section,
 we'll develop a model of an example process and as well as a simple workflow
 runner.
@@ -11,8 +11,8 @@ We expect that readers will fall into two general categories:
 
 - People with a background in BPMN who might not be very familiar Python
 - Python developers who might not know much about BPMN
 
-This section of the documentation provides an example that (hopefully) serves
-the needs of both groups. We will introduce the BPMN elements that SpiffWorkflow
+This section of the documentation provides an example that (hopefully) serves
+the needs of both groups. We will introduce the BPMN elements that SpiffWorkflow
 supports and show how to build a simple workflow runner around them.
 
 SpiffWorkflow does heavy-lifting such as keeping track of task dependencies and
@@ -29,7 +29,7 @@ Quickstart
 
 Check out the code in `spiff-example-cli `_
 and follow the instructions to set up an environment to run it in.
 
-Run the sample workflow we built up using our example application with the following
+Run the sample workflow we built up using our example application with the following
 command:
 
 .. code-block:: console
diff --git a/doc/bpmn/multiinstance.rst b/doc/bpmn/multiinstance.rst
index 7cdb6f1a..03e36f28 100644
--- a/doc/bpmn/multiinstance.rst
+++ b/doc/bpmn/multiinstance.rst
@@ -28,7 +28,7 @@ selections in a collection.
 
    Selecting more than one product
 
-We'll also need to update our element docmentation to display all products.
+We'll also need to update our element documentation to display all products.
 
 .. figure:: figures/documentation_multi.png
    :scale: 30%
diff --git a/doc/bpmn/organization.rst b/doc/bpmn/organization.rst
index 261b30cf..ad78be86 100644
--- a/doc/bpmn/organization.rst
+++ b/doc/bpmn/organization.rst
@@ -1,4 +1,4 @@
-Organizing More Complex Workflows
+Organizing More Complex Workflows
 =================================
 
 BPMN Model
@@ -43,20 +43,20 @@ For a simple code example of displaying a tasks lane, see `Handling Lanes`_
 
 Subprocesses
 ^^^^^^^^^^^^
 
-In general, subprocesses are a way of grouping work into smaller units. This, in
-theory, will help us to re-use sections of business logic, but it will also allow
+In general, subprocesses are a way of grouping work into smaller units. This, in
+theory, will help us to re-use sections of business logic, but it will also allow
 us to treat groups of work as a unit.
 
-Subprocesses come in two different flavors. In this workflow we see an Expanded
-Subprocess. Unfortunately, we can't collapse an expanded subprocess within BPMN.js,
+Subprocesses come in two different flavors. In this workflow we see an Expanded
+Subprocess. Unfortunately, we can't collapse an expanded subprocess within BPMN.js,
 so expanded subprocesses are mainly useful for conceptualizing a group of tasks as
-a unit.
+a unit.
 
-It also possible to refer to external subprocesses via a Call Activity Task. This
-allows us to 'call' a separate workflow in a different file by referencing the ID of
+It is also possible to refer to external subprocesses via a Call Activity Task. This
+allows us to 'call' a separate workflow in a different file by referencing the ID of
 the called workflow, which can simplify business logic and make it re-usable.
 
-We'll expand 'Fulfill Order' into sub tasks -- retrieving the product and shipping
+We'll expand 'Fulfill Order' into sub tasks -- retrieving the product and shipping
 the order -- and create an Expanded Subprocess.
 
 We'll also expand our selection of products, adding several new products and the ability
@@ -68,14 +68,14 @@ to customize certain products by size and style in addition to color.
 
    Updated Product List
 
-.. note::
+.. note::
 
    I've added what customizations are available for each product in the 'Annotations'
   column of the DMN table. This is not actually used by Spiff; it simply provides
   the option of documenting the decisions contained in the table.
 
-Since adding gateways for navigating the new options will add a certain amount of
-clutter to our diagram, we'll create a separate workflow around selecting and
+Since adding gateways for navigating the new options will add a certain amount of
+clutter to our diagram, we'll create a separate workflow around selecting and
 customizing products and refer to that in our main workflow.
 
 .. figure:: figures/call_activity.png
@@ -116,7 +116,7 @@ our sample application, we'll simply display which lane a task belongs to.
 
 .. code:: python
 
     if hasattr(task.task_spec, 'lane') and task.task_spec.lane is not None:
-        lane = f'[{task.task_spec.lane}]'
+        lane = f'[{task.task_spec.lane}]'
     else:
         lane = ''
diff --git a/doc/bpmn/spiff-extensions.rst b/doc/bpmn/spiff-extensions.rst
index 5a64b5b6..00cb395b 100644
--- a/doc/bpmn/spiff-extensions.rst
+++ b/doc/bpmn/spiff-extensions.rst
@@ -17,7 +17,7 @@ instead of the `run.py `_.
 
-Our example application contains two different workflow runners, one that uses tasks with
+Our example application contains two different workflow runners, one that uses tasks with
 Camunda extensions (`run.py `_) and one
-that uses tasks with Spiff extensions
+that uses tasks with Spiff extensions
 (`run-spiff.py `_).
 
-Most of the workflow operations will not change, so shared functions are defined in
+Most of the workflow operations will not change, so shared functions are defined in
 `utils.py `_.
 
 The primary difference is handling user tasks.
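Before walking through the loading code discussed below, here is a compact sketch of the whole load step. The function name :code:`parse` and its arguments are ours, not the example application's; the parser methods are the ones covered in the next section.

.. code:: python

    from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
    from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

    def parse(process_id, bpmn_files):
        # Parse every file, then build a spec for the top-level process
        # and for any processes it reaches through Call Activities.
        parser = CamundaParser()
        parser.add_bpmn_files(bpmn_files)
        top_level = parser.get_spec(process_id)
        subprocesses = parser.get_subprocess_specs(process_id)
        return BpmnWorkflow(top_level, subprocesses)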
Spiff User Tasks define an extensions @@ -23,7 +23,7 @@ Loading a Workflow ------------------- The :code:`CamundaParser` extends the base :code:`BpmnParser`, adding functionality for -parsing forms defined in Camunda User Tasks and decision tables defined in Camunda +parsing forms defined in Camunda User Tasks and decision tables defined in Camunda Business Rule Tasks. (There is a similar :code:`SpiffBpmnParser` used by the alternate runner.) @@ -52,23 +52,23 @@ Our workflow parser looks like this; We'll obtain the workflow specification from the parser for the top level process using :code:`parser.get_spec()`. -We have two options for finding subprocess specs. The method :code:`parser.find_all_specs()` -will create specs for all executable processes found in every file supplied. The method -:code:`parser.get_subprocess_specs(process)` will create specs only for processes used by -the specified process. Both search recursively for subprocesses; the only difference is +We have two options for finding subprocess specs. The method :code:`parser.find_all_specs()` +will create specs for all executable processes found in every file supplied. The method +:code:`parser.get_subprocess_specs(process)` will create specs only for processes used by +the specified process. Both search recursively for subprocesses; the only difference is the latter method limits the search start to the specified process. -Our examples are pretty simple and we're not loading any extraneous stuff, so we'll +Our examples are pretty simple, and we're not loading any extraneous stuff, so we'll just always load everything. If your entire workflow is contained in your top-level -process, you can omit the :code:`subprocess` argument, but if your workflow contains -call activities, you'll need to use one of these methods to find the models for any +process, you can omit the :code:`subprocess` argument, but if your workflow contains +call activities, you'll need to use one of these methods to find the models for any called processes. We also provide an enhanced script engine to our workflow. More information about how and why you might want to do this is covered in :doc:`advanced`. The :code:`script_engine` argument is optional and the default will be used if none is supplied. -We return :code:`BpmnWorkflow` that runs our top-level workflow and contains specs for any +We return :code:`BpmnWorkflow` that runs our top-level workflow and contains specs for any subprocesses defined by that workflow. Defining Task Handlers @@ -91,7 +91,7 @@ We create a mapping of task type to handler, which we'll pass to our workflow ru This might not be a step you would need to do in an application you build, since you would likely have only one set of task specs that need to be parsed, handled, and -serialized; however our `run` method is an awful lot of code to maintain in two separate +serialized; however, our `run` method is an awful lot of code to maintain in two separate files. Running a Workflow @@ -180,10 +180,10 @@ Examining the Workflow State ---------------------------- When this application is run and we want to present steps to the user, we'll need -to be able to examine the workflow and task states and associated data. We'll cover +to be able to examine the workflow and task states and associated data. We'll cover the basics of this in this section. -The code below is a simple method for displaying information about a task. We use +The code below is a simple method for displaying information about a task. 
We use this in two ways - presenting a list of tasks to a user (in this case the state will always be ready, so we won't include it) @@ -233,7 +233,7 @@ We'll print information about our task as described above, as well as a dump of We can get a list of all tasks regardless of type or state with :code:`workflow.get_tasks()`. The actual list of tasks will get quite long (some tasks are expanded internally by Spiff into -multiple tasks, and all gateways and events are also treated as "tasks"). So we're filtering +multiple tasks, and all gateways and events are also treated as "tasks"). So we're filtering the tasks to only display the ones that would have salience to a user here. We'll further filter those tasks for :code:`READY` and :code:`WAITING` tasks for a more diff --git a/doc/bpmn/tasks.rst b/doc/bpmn/tasks.rst index b00c9160..5c25e2d3 100644 --- a/doc/bpmn/tasks.rst +++ b/doc/bpmn/tasks.rst @@ -171,7 +171,7 @@ Our :code:`select_option` function simply repeats the prompt until the user enters a value contained in the option list. For other fields, we'll just store whatever the user enters, although in the case -where they data type was specified to be a :code:`long`, we'll convert it to a +where the data type was specified to be a :code:`long`, we'll convert it to a number. Finally, we need to explicitly store the user-provided response in a variable @@ -219,4 +219,3 @@ The template string can be obtained from :code:`task.task_spec.documentation`. As noted above, our template class comes from Jinja. We render the template using the task data, which is just a dictionary. - diff --git a/doc/intro.rst b/doc/intro.rst index 7077814e..760f5b67 100644 --- a/doc/intro.rst +++ b/doc/intro.rst @@ -64,7 +64,7 @@ The following example also has one task, represented by the rectangle with curve The sequence flow is represented with a solid line connector. When the node at -the tail of a sequence flow completes, the node at the arrowhead is enabled to start. +the tail of a sequence flow completes, the node at the arrowhead is enabled to start. A More Complicated Workflow @@ -78,7 +78,7 @@ A More Complicated Workflow In this example, the diamond shape is called a gateway. It represents a branch -point in our flow. This gateway is an exclusive data-based gateway (also +point in our flow. This gateway is an exclusive data-based gateway (also called an XOR gateway). With an exclusive gateway, you must take one path or the other based on some data condition. BPMN has other gateway types. @@ -122,4 +122,3 @@ attached to will be cancelled if the event is received) or Non-Interrupting (in which case the task will continue). In both cases, flows may emanate from the Boundary Event, which will trigger those paths if the events occur while the task is being executed. - diff --git a/doc/non-bpmn/custom-tasks/index.rst b/doc/non-bpmn/custom-tasks/index.rst index 3a41fdad..916a148b 100644 --- a/doc/non-bpmn/custom-tasks/index.rst +++ b/doc/non-bpmn/custom-tasks/index.rst @@ -4,7 +4,7 @@ Implementing Custom Tasks Introduction ------------ -In this second tutorial we are going to implement our own task, and +In this second tutorial, we are going to implement our own task, and use serialization and deserialization to store and restore it. 
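As a quick illustration of the documentation rendering described in the tasks section above: the snippet below assumes a ready :code:`task` and uses the Jinja :code:`Template` class those docs refer to.

.. code:: python

    from jinja2 import Template

    # task.task_spec.documentation holds the template string; task.data
    # is a plain dictionary, so it can be passed directly to render().
    if task.task_spec.documentation is not None:
        template = Template(task.task_spec.documentation)
        print(template.render(task.data))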
If you haven't already, you should complete the first diff --git a/doc/non-bpmn/index.rst b/doc/non-bpmn/index.rst index 60b763ef..3f3f36bc 100644 --- a/doc/non-bpmn/index.rst +++ b/doc/non-bpmn/index.rst @@ -1,7 +1,7 @@ Non-BPMN support ================ -We have maintained support for legacy non-BPMN workflows, but we recommend using +We have maintained support for legacy non-BPMN workflows, but we recommend using SpiffWorkflow with BPMN, as this is where current development is focused. .. toctree:: @@ -9,4 +9,4 @@ SpiffWorkflow with BPMN, as this is where current development is focused. tutorial/index custom-tasks/index - patterns \ No newline at end of file + patterns diff --git a/doc/non-bpmn/patterns.rst b/doc/non-bpmn/patterns.rst index 088e1c63..0b659631 100644 --- a/doc/non-bpmn/patterns.rst +++ b/doc/non-bpmn/patterns.rst @@ -1,77 +1,77 @@ -.. _patterns: - -Supported Workflow Patterns -=========================== - -.. HINT:: - All examples are located - `here `_. - -Control-Flow Patterns ---------------------- - -1. Sequence [control-flow/sequence.xml] -2. Parallel Split [control-flow/parallel_split.xml] -3. Synchronization [control-flow/synchronization.xml] -4. Exclusive Choice [control-flow/exclusive_choice.xml] -5. Simple Merge [control-flow/simple_merge.xml] -6. Multi-Choice [control-flow/multi_choice.xml] -7. Structured Synchronizing Merge [control-flow/structured_synchronizing_merge.xml] -8. Multi-Merge [control-flow/multi_merge.xml] -9. Structured Discriminator [control-flow/structured_discriminator.xml] -10. Arbitrary Cycles [control-flow/arbitrary_cycles.xml] -11. Implicit Termination [control-flow/implicit_termination.xml] -12. Multiple Instances without Synchronization [control-flow/multi_instance_without_synch.xml] -13. Multiple Instances with a Priori Design-Time Knowledge [control-flow/multi_instance_with_a_priori_design_time_knowledge.xml] -14. Multiple Instances with a Priori Run-Time Knowledge [control-flow/multi_instance_with_a_priori_run_time_knowledge.xml] -15. Multiple Instances without a Priori Run-Time Knowledge [control-flow/multi_instance_without_a_priori.xml] -16. Deferred Choice [control-flow/deferred_choice.xml] -17. Interleaved Parallel Routing [control-flow/interleaved_parallel_routing.xml] -18. Milestone [control-flow/milestone.xml] -19. Cancel Task [control-flow/cancel_task.xml] -20. Cancel Case [control-flow/cancel_case.xml] -21. *NOT IMPLEMENTED* -22. Recursion [control-flow/recursion.xml] -23. Transient Trigger [control-flow/transient_trigger.xml] -24. Persistent Trigger [control-flow/persistent_trigger.xml] -25. Cancel Region [control-flow/cancel_region.xml] -26. Cancel Multiple Instance Task [control-flow/cancel_multi_instance_task.xml] -27. Complete Multiple Instance Task [control-flow/complete_multiple_instance_activity.xml] -28. Blocking Discriminator [control-flow/blocking_discriminator.xml] -29. Cancelling Discriminator [control-flow/cancelling_discriminator.xml] -30. Structured Partial Join [control-flow/structured_partial_join.xml] -31. Blocking Partial Join [control-flow/blocking_partial_join.xml] -32. Cancelling Partial Join [control-flow/cancelling_partial_join.xml] -33. Generalized AND-Join [control-flow/generalized_and_join.xml] -34. Static Partial Join for Multiple Instances [control-flow/static_partial_join_for_multi_instance.xml] -35. Cancelling Partial Join for Multiple Instances [control-flow/cancelling_partial_join_for_multi_instance.xml] -36. 
Dynamic Partial Join for Multiple Instances [control-flow/dynamic_partial_join_for_multi_instance.xml] -37. Acyclic Synchronizing Merge [control-flow/acyclic_synchronizing_merge.xml] -38. General Synchronizing Merge [control-flow/general_synchronizing_merge.xml] -39. Critical Section [control-flow/critical_section.xml] -40. Interleaved Routing [control-flow/interleaved_routing.xml] -41. Thread Merge [control-flow/thread_merge.xml] -42. Thread Split [control-flow/thread_split.xml] -43. Explicit Termination [control-flow/explicit_termination.xml] - -Workflow Data Patterns ----------------------- - -1. Task Data [data/task_data.xml] -2. Block Data [data/block_data.xml] -3. *NOT IMPLEMENTED* -4. *NOT IMPLEMENTED* -5. *NOT IMPLEMENTED* -6. *NOT IMPLEMENTED* -7. *NOT IMPLEMENTED* -8. *NOT IMPLEMENTED* -9. Task to Task [data/task_to_task.xml] -10. Block Task to Sub-Workflow Decomposition [data/block_to_subworkflow.xml] -11. Sub-Workflow Decomposition to Block Task [data/subworkflow_to_block.xml] - -Specs that have no corresponding workflow pattern on workflowpatterns.com -------------------------------------------------------------------------- - -- Execute - spawns a subprocess and waits for the results -- Transform - executes commands that can be used for data transforms -- Celery - executes a Celery task (see http://celeryproject.org/) +.. _patterns: + +Supported Workflow Patterns +=========================== + +.. HINT:: + All examples are located + `here `_. + +Control-Flow Patterns +--------------------- + +1. Sequence [control-flow/sequence.xml] +2. Parallel Split [control-flow/parallel_split.xml] +3. Synchronization [control-flow/synchronization.xml] +4. Exclusive Choice [control-flow/exclusive_choice.xml] +5. Simple Merge [control-flow/simple_merge.xml] +6. Multi-Choice [control-flow/multi_choice.xml] +7. Structured Synchronizing Merge [control-flow/structured_synchronizing_merge.xml] +8. Multi-Merge [control-flow/multi_merge.xml] +9. Structured Discriminator [control-flow/structured_discriminator.xml] +10. Arbitrary Cycles [control-flow/arbitrary_cycles.xml] +11. Implicit Termination [control-flow/implicit_termination.xml] +12. Multiple Instances without Synchronization [control-flow/multi_instance_without_synch.xml] +13. Multiple Instances with a Priori Design-Time Knowledge [control-flow/multi_instance_with_a_priori_design_time_knowledge.xml] +14. Multiple Instances with a Priori Run-Time Knowledge [control-flow/multi_instance_with_a_priori_run_time_knowledge.xml] +15. Multiple Instances without a Priori Run-Time Knowledge [control-flow/multi_instance_without_a_priori.xml] +16. Deferred Choice [control-flow/deferred_choice.xml] +17. Interleaved Parallel Routing [control-flow/interleaved_parallel_routing.xml] +18. Milestone [control-flow/milestone.xml] +19. Cancel Task [control-flow/cancel_task.xml] +20. Cancel Case [control-flow/cancel_case.xml] +21. *NOT IMPLEMENTED* +22. Recursion [control-flow/recursion.xml] +23. Transient Trigger [control-flow/transient_trigger.xml] +24. Persistent Trigger [control-flow/persistent_trigger.xml] +25. Cancel Region [control-flow/cancel_region.xml] +26. Cancel Multiple Instance Task [control-flow/cancel_multi_instance_task.xml] +27. Complete Multiple Instance Task [control-flow/complete_multiple_instance_activity.xml] +28. Blocking Discriminator [control-flow/blocking_discriminator.xml] +29. Cancelling Discriminator [control-flow/cancelling_discriminator.xml] +30. Structured Partial Join [control-flow/structured_partial_join.xml] +31. 
Blocking Partial Join [control-flow/blocking_partial_join.xml] +32. Cancelling Partial Join [control-flow/cancelling_partial_join.xml] +33. Generalized AND-Join [control-flow/generalized_and_join.xml] +34. Static Partial Join for Multiple Instances [control-flow/static_partial_join_for_multi_instance.xml] +35. Cancelling Partial Join for Multiple Instances [control-flow/cancelling_partial_join_for_multi_instance.xml] +36. Dynamic Partial Join for Multiple Instances [control-flow/dynamic_partial_join_for_multi_instance.xml] +37. Acyclic Synchronizing Merge [control-flow/acyclic_synchronizing_merge.xml] +38. General Synchronizing Merge [control-flow/general_synchronizing_merge.xml] +39. Critical Section [control-flow/critical_section.xml] +40. Interleaved Routing [control-flow/interleaved_routing.xml] +41. Thread Merge [control-flow/thread_merge.xml] +42. Thread Split [control-flow/thread_split.xml] +43. Explicit Termination [control-flow/explicit_termination.xml] + +Workflow Data Patterns +---------------------- + +1. Task Data [data/task_data.xml] +2. Block Data [data/block_data.xml] +3. *NOT IMPLEMENTED* +4. *NOT IMPLEMENTED* +5. *NOT IMPLEMENTED* +6. *NOT IMPLEMENTED* +7. *NOT IMPLEMENTED* +8. *NOT IMPLEMENTED* +9. Task to Task [data/task_to_task.xml] +10. Block Task to Sub-Workflow Decomposition [data/block_to_subworkflow.xml] +11. Sub-Workflow Decomposition to Block Task [data/subworkflow_to_block.xml] + +Specs that have no corresponding workflow pattern on workflowpatterns.com +------------------------------------------------------------------------- + +- Execute - spawns a subprocess and waits for the results +- Transform - executes commands that can be used for data transforms +- Celery - executes a Celery task (see http://celeryproject.org/)
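For anyone starting from this legacy page, a minimal non-BPMN workflow looks roughly like the sketch below. This is a sketch against the classic pre-BPMN API; the spec name and task name are invented for illustration.

.. code:: python

    from SpiffWorkflow import Workflow
    from SpiffWorkflow.specs import WorkflowSpec, Simple

    # Build a trivial spec (Start -> task_a), then run it to completion.
    spec = WorkflowSpec('demo')
    task_a = Simple(spec, 'task_a')
    spec.start.connect(task_a)

    workflow = Workflow(spec)
    workflow.complete_all()
    assert workflow.is_completed()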