Merge branch 'main' of github.com:sartography/spiff-arena
commit 7ab36f7155
@@ -46,7 +46,7 @@ class NodeParser:
             if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
                 specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
             else:
-                raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.file_name)
+                raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.filename)
         return specs

     def parse_outgoing_data_references(self):
@@ -56,7 +56,7 @@ class NodeParser:
             if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
                 specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
             else:
-                raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.file_name)
+                raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.filename)
         return specs

     def parse_extensions(self, node=None):
@@ -7,7 +7,7 @@ Filtering Tasks
 In our earlier example, all we did was check the lane a task was in and display
 it along with the task name and state.

-Lets take a look at a sample workflow with lanes:
+Let's take a look at a sample workflow with lanes:

 .. figure:: figures/lanes.png
    :scale: 30%
@@ -15,7 +15,7 @@ Lets take a look at a sample workflow with lanes:

    Workflow with lanes

-To get all of the tasks that are ready for the 'Customer' workflow, we could
+To get all the tasks that are ready for the 'Customer' workflow, we could
 specify the lane when retrieving ready user tasks:

 .. code:: python
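For orientation, a minimal sketch of that lane-filtered retrieval, assuming an initialized :code:`BpmnWorkflow` instance named :code:`workflow` and that :code:`get_ready_user_tasks` accepts a :code:`lane` keyword as described above:

.. code:: python

    # Sketch only: `workflow` is assumed to be an initialized BpmnWorkflow.
    ready_tasks = workflow.get_ready_user_tasks(lane='Customer')
    for task in ready_tasks:
        print(task.task_spec.name)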
@@ -50,14 +50,14 @@ Logging
 Spiff provides several loggers:
 - the :code:`spiff` logger, which emits messages when a workflow is initialized and when tasks change state
 - the :code:`spiff.metrics` logger, which emits messages containing the elapsed duration of tasks
-- the :code:`spiff.data` logger, which emits message when task or workflow data is updated.
+- the :code:`spiff.data` logger, which emits a message when task or workflow data is updated.

 Log level :code:`INFO` will provide reasonably detailed information about state changes.

 As usual, log level :code:`DEBUG` will probably provide more logs than you really want
 to see, but the logs will contain the task and task internal data.

-Data can be included at any level less than :code:`INFO`. In our exmple application,
+Data can be included at any level less than :code:`INFO`. In our example application,
 we define a custom log level

 .. code:: python
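A rough illustration of wiring these loggers up with the standard library (the logger names come from the list above; the handler and level choices are only one possibility):

.. code:: python

    import logging

    # 'spiff' is the parent of 'spiff.metrics' and 'spiff.data', so a handler
    # attached here sees all three loggers.
    logging.basicConfig(level=logging.INFO)
    logging.getLogger('spiff').setLevel(logging.INFO)

    # A custom level below INFO lets data logging be switched on selectively,
    # in the spirit of the example application described here.
    logging.addLevelName(15, 'DATA_LOG')
    logging.getLogger('spiff.data').setLevel(15)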
@@ -76,7 +76,7 @@ Serialization

 Serialization Changed in Version 1.1.7.
 Support for pre-1.1.7 serialization will be dropped in a future release.
-The old serialization method still works but it is deprecated.
+The old serialization method still works, but it is deprecated.
 To migrate your system to the new version, see "Migrating between
 serialization versions" below.
@@ -131,7 +131,7 @@ To restore the workflow:
     with open(args.restore) as state:
         wf = serializer.deserialize_json(state.read())

-The workflow serializer is designed to be flexible and modular and as such is a little complicated. It has
+The workflow serializer is designed to be flexible and modular, and as such is a little complicated. It has
 two components:

 - a workflow spec converter (which handles workflow and task specs)
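The dump side of that round trip is symmetrical; a minimal sketch, assuming the same :code:`serializer` and an :code:`args.dump` path analogous to :code:`args.restore`:

.. code:: python

    # Sketch: serialize_json is the counterpart of the deserialize_json call above.
    with open(args.dump, 'w') as state:
        state.write(serializer.serialize_json(wf))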
@@ -141,7 +141,7 @@ The default workflow spec converter likely to meet your needs, either on its own
 :code:`UserTask` and :code:`BusinessRuleTask` in the :code:`camnuda` or :code:`spiff` and :code:`dmn` subpackages
 of this library, and all you'll need to do is add them to the list of task converters, as we did above.

-However, he default data converter is very simple, adding only JSON-serializable conversions of :code:`datetime`
+However, the default data converter is very simple, adding only JSON-serializable conversions of :code:`datetime`
 and :code:`timedelta` objects (we make these available in our default script engine) and UUIDs. If your
 workflow or task data contains objects that are not JSON-serializable, you'll need to extend ours, or extend
 its base class to create one of your own.
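To make the idea concrete, here is the general shape of such a converter; the class below is illustrative only and does not reproduce the library's base class:

.. code:: python

    from datetime import datetime

    # Illustrative sketch: pair a JSON-friendly representation with a way to
    # rebuild the original object, which is all a data converter has to do.
    class DatetimeConverter:

        def convert(self, obj: datetime) -> dict:
            return {'typename': 'datetime', 'value': obj.isoformat()}

        def restore(self, dct: dict) -> datetime:
            return datetime.fromisoformat(dct['value'])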
@@ -245,7 +245,7 @@ The code would then look more like this:

 Because the serializer is highly customizable, we've made it possible for you to manage your own versions of the
 serialization. You can do this by passing a version number into the serializer, which will be embedded in the
-json of all workflows. This allow you to modify the serialization and customize it over time, and still manage
+json of all workflows. This allows you to modify the serialization and customize it over time, and still manage
 the different forms as you make adjustments without leaving people behind.

 Versioned Serializer
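A minimal sketch of passing that version number; the constructor keyword is an assumption based on this description:

.. code:: python

    from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer

    # Sketch: assumes BpmnWorkflowSerializer accepts a `version` keyword that is
    # embedded in the serialized JSON, as described above. `wf_spec_converter`
    # is the spec converter configured earlier in these docs.
    serializer = BpmnWorkflowSerializer(wf_spec_converter, version='1.0')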
@@ -273,7 +273,7 @@ security reasons.
 and :code:`exec`! If you have security concerns, you should definitely investigate
 replacing the default with your own implementation.

-We'll cover a simple extension of custom script engine here. There is also an examples of
+We'll cover a simple extension of custom script engine here. There is also an example of
 a similar engine based on `RestrictedPython <https://restrictedpython.readthedocs.io/en/latest/>`_
 included alongside this example.

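As a bare-bones illustration of the kind of replacement being suggested (the :code:`evaluate` override point is an assumption; check :code:`PythonScriptEngine` for the exact hooks):

.. code:: python

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

    # Sketch: evaluate expressions against task data with a restricted set of
    # globals instead of the full builtins that eval normally sees.
    class SaferScriptEngine(PythonScriptEngine):

        SAFE_GLOBALS = {'__builtins__': {}, 'sum': sum, 'len': len, 'min': min, 'max': max}

        def evaluate(self, task, expression):
            return eval(expression, dict(self.SAFE_GLOBALS), task.data)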
@@ -31,7 +31,7 @@ We'll include examples of all of these types in this section.
 Transactions
 ^^^^^^^^^^^^

-We also need to introduce the concept of a Transaction, bceause certain events
+We also need to introduce the concept of a Transaction because certain events
 can only be used in that context. A Transaction is essentially a subprocess, but
 it must fully complete before it affects its outer workflow.

@@ -147,7 +147,7 @@ this tutorial.

 We ask the Employee to verify that they were able to retrieve the product; if they
 were unable to do so, then we generate an Error End Event, which we will handle
-with an Interrupting Error Boundary Event (Error events are *always* Interrupting).
+with an Interrupting Error Boundary Event (Error events are *always* interrupting).

 If the product is unavailable, our Manager will notify the customer, issue a refund,
 and cancel the order.
@@ -161,7 +161,7 @@ Event, you'll have to use Escalation, because BPMN does not allow Intermediate E
 and that Error Events cannot be Non-Interrupting.

 In our example, we'll assume that if we failed to ship the product, we can try again later,
-so we will not end the Subprocess (Escalation events can be either Interrupting or
+so, we will not end the Subprocess (Escalation events can be either Interrupting or
 Non-Interrupting).

 However, we still want to notify our customer of a delay, so we use a Non-Interrupting
@@ -28,7 +28,7 @@ selections in a collection.

    Selecting more than one product

-We'll also need to update our element docmentation to display all products.
+We'll also need to update our element documentation to display all products.

 .. figure:: figures/documentation_multi.png
    :scale: 30%

@@ -17,7 +17,7 @@ instead of the `run.py <https://github.com/sartography/spiff-example-clie/blob/m
 Camunda's BPMN editor does not handle data objects in the expected way. You can create data object
 references, but there is no way to re-use data objects.

-It also does not support Message Correlations, and the inteface for generating a message payload doesn't work
+It also does not support Message Correlations, and the interface for generating a message payload doesn't work
 well in a Python environment.

 We have extended BPMN.js to correct some of these issues. The examples in this section were created using our
@@ -59,7 +59,7 @@ the 'Enter Payment Info' has been completed.
 Configuring Messages
 ^^^^^^^^^^^^^^^^^^^^

-Messages are handled slightly differently in Spiff Message Events. On an Message Throw Event or Send Task,
+Messages are handled slightly differently in Spiff Message Events. On a Message Throw Event or Send Task,
 we define a payload, which is simply a bit of python code that will be evaluated against the task data and
 sent along with the message. In the corresponding Message Catch Event or Receive Task, we define a
 variable name where we'll store the result.

@@ -58,7 +58,7 @@ will create specs for all executable processes found in every file supplied. Th
 the specified process. Both search recursively for subprocesses; the only difference is
 the latter method limits the search start to the specified process.

-Our examples are pretty simple and we're not loading any extraneous stuff, so we'll
+Our examples are pretty simple, and we're not loading any extraneous stuff, so we'll
 just always load everything. If your entire workflow is contained in your top-level
 process, you can omit the :code:`subprocess` argument, but if your workflow contains
 call activities, you'll need to use one of these methods to find the models for any
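A usage sketch of the two loading strategies (the method names follow the description above; the file and process names are placeholders):

.. code:: python

    parser.add_bpmn_files(['top_level.bpmn', 'call_activities.bpmn'])

    # Either build specs for every executable process found in the files...
    all_specs = parser.find_all_specs()

    # ...or start from one process and recurse only into its subprocesses.
    top_level = parser.get_spec('top_level')
    subprocesses = parser.get_subprocess_specs('top_level')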
@@ -91,7 +91,7 @@ We create a mapping of task type to handler, which we'll pass to our workflow ru

 This might not be a step you would need to do in an application you build, since
 you would likely have only one set of task specs that need to be parsed, handled, and
-serialized; however our `run` method is an awful lot of code to maintain in two separate
+serialized; however, our `run` method is an awful lot of code to maintain in two separate
 files.

 Running a Workflow
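The mapping itself is just a dictionary from task spec class to handler; a sketch with a hypothetical handler class:

.. code:: python

    # Import path assumed from the SpiffWorkflow 1.x layout.
    from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask

    class ManualTaskHandler:
        """Hypothetical stand-in for the console UI code described here."""
        def __init__(self, task):
            self.task = task

    # Passed to the workflow runner: task spec class -> handler.
    handlers = {ManualTask: ManualTaskHandler}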
@@ -171,7 +171,7 @@ Our :code:`select_option` function simply repeats the prompt until the user
 enters a value contained in the option list.

 For other fields, we'll just store whatever the user enters, although in the case
-where they data type was specified to be a :code:`long`, we'll convert it to a
+where the data type was specified to be a :code:`long`, we'll convert it to a
 number.

 Finally, we need to explicitly store the user-provided response in a variable
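That conversion is a single cast keyed off the declared field type; a sketch, assuming Camunda-style form fields with :code:`id`, :code:`label`, and :code:`type` attributes:

.. code:: python

    # Sketch: coerce the free-form answer when the form declared a long.
    answer = input(f'{field.label} ')
    if field.type == 'long':
        answer = int(answer)
    task.update_data_var(field.id, answer)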
@@ -219,4 +219,3 @@ The template string can be obtained from :code:`task.task_spec.documentation`.

 As noted above, our template class comes from Jinja. We render the template
 using the task data, which is just a dictionary.
-
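Concretely, that rendering step is just the following (using the :code:`jinja2` package; :code:`task` is the completed task):

.. code:: python

    from jinja2 import Template

    # The template string comes straight off the task spec, per the docs above.
    template = Template(task.task_spec.documentation)
    print(template.render(task.data))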
@@ -122,4 +122,3 @@ attached to will be cancelled if the event is received) or Non-Interrupting (in
 which case the task will continue). In both cases, flows may emanate from the
 Boundary Event, which will trigger those paths if the events occur while the task
 is being executed.
-
@@ -4,7 +4,7 @@ Implementing Custom Tasks
 Introduction
 ------------

-In this second tutorial we are going to implement our own task, and
+In this second tutorial, we are going to implement our own task, and
 use serialization and deserialization to store and restore it.

 If you haven't already, you should complete the first
@ -34,4 +34,6 @@ Fire it up.
|
||||
#> flask run
|
||||
```
|
||||
|
||||
Any dependencies you add will now be available for SpiffWorkflow to call using a Service Task. What's more, those services are now discoverable! So when someone drops a Service Task into their diagram, they will have a dropdown list of all the services you have made available to them. And those services will know what parameters are required, and can prompt diagram authors to provide information necessary to make the call. This can be no parameters at all (just give me a fact about Chuck Norris) to arbitrarily complex parameters such as a json structure to be added to a DynamoDB Table.
|
||||
Any dependencies you add will now be available for SpiffWorkflow to call using a Service Task. What's more, those services are now discoverable! So when someone drops a Service Task into their diagram, they will have a dropdown list of all the services you have made available to them. And those services will know what parameters are required, and can prompt diagram authors to provide information necessary to make the call. Which can be no parameters at all (Just give me a fact about Chuck Norris) ... to complex parameters (a json structure to be added to a DynamoDB Table).
|
||||
|
||||
|
||||
|
connector-proxy-demo/poetry.lock (generated, 44 lines changed)
@@ -95,6 +95,42 @@ url = "https://github.com/sartography/connector-aws.git"
 reference = "HEAD"
 resolved_reference = "ad386286bcc72eeb000b9b053596dfee40f7c6b5"

+[[package]]
+name = "connector-http"
+version = "0.1.0"
+description = "Make HTTP Requests available to SpiffWorkflow Service Tasks"
+category = "main"
+optional = false
+python-versions = "^3.11"
+develop = false
+
+[package.dependencies]
+requests = "^2.28.1"
+
+[package.source]
+type = "git"
+url = "https://github.com/sartography/connector-http.git"
+reference = "HEAD"
+resolved_reference = "337671b38f47bd8a3113bc6fa85b987828c4ee66"
+
+[[package]]
+name = "connector-slack"
+version = "0.1.0"
+description = "Send messages to Slack through a SpiffWorkflow Service Task"
+category = "main"
+optional = false
+python-versions = "^3.11"
+develop = false
+
+[package.dependencies]
+requests = "^2.28.1"
+
+[package.source]
+type = "git"
+url = "https://github.com/sartography/connector-slack.git"
+reference = "HEAD"
+resolved_reference = "8390faca568f769f37412c7a58041bbad1695f31"
+
 [[package]]
 name = "Flask"
 version = "2.2.2"
@@ -308,7 +344,7 @@ Flask-OAuthlib = "^0.9.6"
 type = "git"
 url = "https://github.com/sartography/spiffworkflow-proxy"
 reference = "HEAD"
-resolved_reference = "5e4926030cf6f2808ddb8e65527168dd914e5fc3"
+resolved_reference = "cfe9b93665e10390a2e64c492c57bd2613364588"

 [[package]]
 name = "urllib3"
@@ -339,8 +375,8 @@ watchdog = ["watchdog"]

 [metadata]
 lock-version = "1.1"
-python-versions = "^3.10"
-content-hash = "cc395c0c1ce2b0b7ca063a17617981b2d55db39802265b36f0bc3c4383c89919"
+python-versions = "^3.11"
+content-hash = "3ac32d6902d0f7e425db530dd3f907f3f3e3f1717c4e77c955e31f39fd3bdeec"

 [metadata.files]
 boto3 = [
@@ -372,6 +408,8 @@ colorama = [
     {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
 ]
 connector-aws = []
+connector-http = []
+connector-slack = []
 Flask = [
     {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"},
     {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"},
@@ -8,10 +8,12 @@ readme = "README.md"
 #packages = [{include = "connector_proxy_demo", from = "."}]

 [tool.poetry.dependencies]
-python = "^3.10"
+python = "^3.11"
 Flask = "^2.2.2"
 spiffworkflow-proxy = {git = "https://github.com/sartography/spiffworkflow-proxy"}
 connector-aws = { git = "https://github.com/sartography/connector-aws.git"}
+connector-http = {git = "https://github.com/sartography/connector-http.git"}
+connector-slack = {git = "https://github.com/sartography/connector-slack.git"}
 gunicorn = "^20.1.0"

 [build-system]
@@ -35,6 +35,7 @@ services:
       RUN_BACKGROUND_SCHEDULER: "true"
       OPEN_ID_CLIENT_ID: "spiffworkflow-backend"
       OPEN_ID_CLIENT_SECRET_KEY: "my_open_id_secret_key"
+      CONNECTOR_PROXY_URL: "http://spiffworkflow-connector:8004"
     ports:
       - "${SPIFF_BACKEND_PORT:-8000}:${SPIFF_BACKEND_PORT:-8000}/tcp"
     volumes: