Squashed 'SpiffWorkflow/' content from commit 63db3e4

git-subtree-dir: SpiffWorkflow
git-subtree-split: 63db3e45947ec66b8d0efc2c74064004f8ff482c
This commit is contained in:
Jon Herron 2022-10-12 10:19:53 -04:00
commit 0892db6fa7
638 changed files with 203081 additions and 0 deletions

9
.editorconfig Normal file
View File

@ -0,0 +1,9 @@
root = true
[*]
indent_style = space
indent_size = 4
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
max_line_length = 79

2
.flake8 Normal file
View File

@ -0,0 +1,2 @@
[flake8]

View File

@ -0,0 +1,33 @@
name: Publish Python 🐍 distributions 📦 to PyPI and TestPyPI
on:
release:
types: [published]
jobs:
build-n-publish:
name: Build and publish Python 🐍 distributions 📦 to PyPI
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Set up Python 3.9
uses: actions/setup-python@v1
with:
python-version: 3.9
- name: Install pypa/build
run: >-
python -m
pip install
build
--user
- name: Build a binary wheel and a source tarball
run: >-
python -m
build
--sdist
--wheel
--outdir dist/
- name: Publish distribution 📦 to PyPI
if: startsWith(github.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@master
with:
username: __token__
password: ${{ secrets.PYPI_API_TOKEN }}

279
.gitignore vendored Normal file
View File

@ -0,0 +1,279 @@
# Created by https://www.toptal.com/developers/gitignore/api/python,intellij+all,macos
# Edit at https://www.toptal.com/developers/gitignore?templates=python,intellij+all,macos
### Intellij+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### Intellij+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
.idea/
# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
*.iml
modules.xml
.idea/misc.xml
*.ipr
# Sonarlint plugin
.idea/sonarlint
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# End of https://www.toptal.com/developers/gitignore/api/python,intellij+all,macos
*.py[co]
*.swp
dist
/build
*.egg-info
unit_test.cfg
nosetests.xml
.coverage
coverage.xml
.c9revisions
.idea
/venv

16
.readthedocs.yml Normal file
View File

@ -0,0 +1,16 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
build:
image: latest
# Optionally build your docs in additional formats such as PDF
formats: []
python:
pip_install: true
version: 3.7
extra_requirements:
- docs

7
.sonarcloud.properties Normal file
View File

@ -0,0 +1,7 @@
sonar.organization=sartography
sonar.projectKey=sartography_SpiffWorkflow
sonar.host.url=https://sonarcloud.io
sonar.exclusions=*.bpmn,*.dmn,doc/**
sonar.sources=SpiffWorkflow
sonar.test.inclusions=tests
sonar.python.coverage.reportPaths=tests/SpiffWorkflow/coverage.xml

1
.tool-versions Normal file
View File

@ -0,0 +1 @@
python 3.10.4

36
.travis.yml Normal file
View File

@ -0,0 +1,36 @@
dist: focal
language: python
python:
- 3.7
- 3.8
- 3.9
- 3.10
addons:
sonarcloud:
organization: sartography
install:
- pip install -r requirements.txt
- pip install celery
script:
- cd tests/SpiffWorkflow
- coverage run --source=SpiffWorkflow -m unittest discover -v . "*Test.py"
- coverage xml -i
- cd ../..
after_success:
- sonar-scanner
git:
depth: false
jobs:
include:
- python: 3.7
- python: 3.8
- python: 3.9
- python: 3.10
env: RUN_QUALITY_GATES=true

5
AUTHORS Normal file
View File

@ -0,0 +1,5 @@
Samuel Abels <http://github.com/knipknap/>
Ziad Sawalha <http://github.com/ziadsawalha/>
Matthew Hampton <http://github.com/matthewhampton/>
Kelly McDonald
Dan Funk

60
CONTRIB Normal file
View File

@ -0,0 +1,60 @@
Guide for Contributors
#######################
Coding style:
Please follow PEP8: http://www.python.org/dev/peps/pep-0008/
Testing:
Non-public classes and methods MUST be prefixed by _. This is also important
because the test and API documentation machinery makes assumptions based on
this convention.
Every added public class MUST have a corresponding unit test. The tests are
placed in the following directory: tests/SpiffWorkflow/
The test directory layout mirrors the source code directory layout, e.g.
SpiffWorkflow/specs/Join.py
has a corresponding test in
tests/SpiffWorkflow/specs/JoinTest.py
The unit test for each class MUST have a CORRELATE class attribute that points
to the tested class. (The test machinery uses this attribute to find untested
methods.)
Each commit MUST NOT break functionality. In other words, the code in the
repository should function at any time, and all tests MUST pass.
Documentation:
Every public class and function or method MUST include API documentation. The
documentation MUST cover the method's arguments and return values.
Write inline documentation generously.
Repository:
Make sure that each commit contains related changes only. E.g. don't fix
two unrelated bugs in one commit, or introduce a new feature while refactoring
another part of the program in the same commit. When in doubt, use multiple
small commits. In general, most commits should be relatively small unless they
are plain additions.
Licensing:
You have to agree to licensing under the LGPLv3, and every added file MUST
include a copyright header.
If you modify a file and add a chunk of at least 7 lines in size, please add
yourself to the copyright header of that file.
## Releases
For you dev op folks who release builds to the larger community ...
Be sure to edit the conf.py, and update the release tag: doc/conf.py
And also edit setup.py and assure that has the same release tag.
New versions of SpiffWorkflow are automatically published to PyPi whenever
a maintainer of our GitHub repository creates a new release on GitHub. This
is managed through GitHub's actions. The configuration of which can be
found in .github/workflows/....
Just create a release in GitHub that matches the release number in doc/conf.py

165
COPYING Normal file
View File

@ -0,0 +1,165 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.

4
Dockerfile Normal file
View File

@ -0,0 +1,4 @@
FROM python:3.6
RUN apt-get -y update && apt-get upgrade -yu
COPY . /tmp/SpiffWorkflow
RUN cd /tmp/SpiffWorkflow && make wheel && pip install dist/SpiffWorkflow*.whl

3
INSTALL Normal file
View File

@ -0,0 +1,3 @@
To install this package, run
sudo python setup.py install --prefix /usr/local

88
Makefile Normal file
View File

@ -0,0 +1,88 @@
NAME=SpiffWorkflow
VERSION=`python setup.py --version`
PREFIX=/usr/local/
BIN_DIR=$(PREFIX)/bin
SITE_DIR=$(PREFIX)`python -c "import sys; from distutils.sysconfig import get_python_lib; print get_python_lib()[len(sys.prefix):]"`
###################################################################
# Standard targets.
###################################################################
.PHONY : clean
clean:
find . -name "*.pyc" -o -name "*.pyo" | xargs -rn1 rm -f
find . -name "*.egg-info" | xargs -rn1 rm -r
rm -Rf build
cd doc; make clean
.PHONY : dist-clean
dist-clean: clean
rm -Rf dist
.PHONY : doc
doc:
cd doc; make
install:
mkdir -p $(SITE_DIR)
./version.sh
export PYTHONPATH=$(SITE_DIR):$(PYTHONPATH); \
python setup.py install --prefix $(PREFIX) \
--install-scripts $(BIN_DIR) \
--install-lib $(SITE_DIR)
./version.sh --reset
uninstall:
# Sorry, Python's distutils support no such action yet.
.PHONY : tests
tests:
cd tests/$(NAME)
PYTHONPATH=../.. python -m unittest discover -v . "*Test.py"
.PHONY : tests-cov
tests-cov:
cd tests/$(NAME)
coverage run --source=$(NAME) -m unittest discover -v . "*Test.py"
.PHONY : tests-ind
tests-ind:
cd tests/$(NAME)
@PYTHONPATH=../.. find . -name "*Test.py" -printf '%p' -exec python -m unittest {} \;
.PHONY : tests-timing
tests-timing:
@make tests-ind 2>&1 | ./scripts/test_times.py
###################################################################
# Package builders.
###################################################################
targz: clean
./version.sh
python setup.py sdist --formats gztar
./version.sh --reset
tarbz: clean
./version.sh
python setup.py sdist --formats bztar
./version.sh --reset
wheel: clean
./version.sh
python setup.py bdist_wheel --universal
./version.sh --reset
deb: clean
./version.sh
debuild -S -sa
cd ..; sudo pbuilder build $(NAME)_$(VERSION)-0ubuntu1.dsc; cd -
./version.sh --reset
dist: targz tarbz wheel
###################################################################
# Publishers.
###################################################################
dist-publish:
./version.sh
python setup.py bdist_wheel --universal upload
./version.sh --reset

121
README.md Normal file
View File

@ -0,0 +1,121 @@
## SpiffWorkflow
![Logo](./graphics/logo_med.png)
Spiff Workflow is a workflow engine implemented in pure Python. It is based on
the excellent work of the Workflow Patterns initiative. In 2020 and 2021,
extensive support was added for BPMN / DMN processing.
## Motivation
We created SpiffWorkflow to support the development of low-code business
applications in Python. Using BPMN will allow non-developers to describe
complex workflow processes in a visual diagram, coupled with a powerful python
script engine that works seamlessly within the diagrams. SpiffWorkflow can parse
these diagrams and execute them. The ability for businesses to create
clear, coherent diagrams that drive an application has far reaching potential.
While multiple tools exist for doing this in Java, we believe that wide
adoption of the Python Language, and its ease of use, create a winning
strategy for building Low-Code applications.
## Build status
[![Build Status](https://travis-ci.com/sartography/SpiffWorkflow.svg?branch=master)](https://travis-ci.com/sartography/SpiffWorkflow)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=alert_status)](https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow)
[![Coverage](https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=coverage)](https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow)
[![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow)
[![Documentation Status](https://readthedocs.org/projects/spiffworkflow/badge/?version=latest)](http://spiffworkflow.readthedocs.io/en/latest/?badge=latest)
[![Issues](https://img.shields.io/github/issues/sartography/spiffworkflow)](https://github.com/sartography/SpiffWorkflow/issues)
[![Pull Requests](https://img.shields.io/github/issues-pr/sartography/spiffworkflow)](https://github.com/sartography/SpiffWorkflow/pulls)
## Code style
[![PEP8](https://img.shields.io/badge/code%20style-pep8-orange.svg)](https://www.python.org/dev/peps/pep-0008/)
## Dependencies
We've worked to minimize external dependencies. We rely on lxml for parsing
XML Documents, and there is some legacy support for Celery, but it is not
core to the implementation, it is just a way to interconnect these systems.
<b>Built with</b>
- [lxml](https://lxml.de/)
- [celery](https://docs.celeryproject.org/en/stable/)
## Features
* __BPMN__ - support for parsing BPMN diagrams, including the more complex
components, like pools and lanes, multi-instance tasks, sub-workflows, timer
events, signals, messages, boundary events and looping.
* __DMN__ - We have a baseline implementation of DMN that is well integrated
with our Python Execution Engine.
* __Forms__ - forms, including text fields, selection lists, and most every other
thing that can be extracted from the Camunda XML extension, and returned as
json data that can be used to generate forms on the command line, or in web
applications (we've used Formly to good success)
* __Python Workflows__ - We've retained support for building workflows directly
in code, or running workflows based on an internal JSON data structure.
_A complete list of the latest features is available with our [release notes](https://github.com/sartography/SpiffWorkflow/releases/tag/1.0) for
version 1.0._
## Code Examples and Documentation
Detailed documentation is available on [ReadTheDocs](https://spiffworkflow.readthedocs.io/en/latest/)
Also, checkout our [example application](https://github.com/sartography/spiff-example-cli), which we
reference extensively from the Documentation.
## Installation
```
pip install spiffworkflow
```
## Tests
```
cd tests/SpiffWorkflow
coverage run --source=SpiffWorkflow -m unittest discover -v . "*Test.py"
```
## Support
You can find us on Discord at https://discord.gg/zDEBEnrF
Commercial support for SpiffWorkflow is available from
[Sartography](https://sartography.com)
## Contribute
Pull Requests are and always will be welcome!
Please check your formatting, assure that all tests are passing, and include
any additional tests that can demonstrate the new code you created is working
as expected. If applicable, please reference the issue number in your pull
request.
## Credits and Thanks
Samuel Abels (@knipknap) for creating SpiffWorkflow and maintaining it for over
a decade.
Matthew Hampton (@matthewhampton) for his initial contributions around BPMN
parsing and execution.
The University of Virginia for allowing us to take on the mammoth task of
building a general-purpose workflow system for BPMN, and allowing us to
contribute that back to the open source community. In particular, we would like
to thank [Ron Hutchins](https://www.linkedin.com/in/ron-hutchins-b19603123/),
for his trust and support. Without him our efforts would not be possible.
Bruce Silver, the author of BPMN Quick and Easy Using Method and Style, whose
work we referenced extensively as we made implementation decisions and
educated ourselves on the BPMN and DMN standards.
The BPMN.js library, without which we would not have the tools to effectively
build out our models, embed an editor in our application, and pull this mad
mess together.
Kelly McDonald (@w4kpm) who dove deeper into the core of SpiffWorkflow than
anyone else, and was instrumental in helping us get some of these major
enhancements working correctly.
Thanks also to the many contributions from our community. Large and small.
From Ziad (@ziadsawalha) in the early days to Elizabeth (@essweine) more
recently. It is good to be a part of this long lived and strong
community.
## License
GNU LESSER GENERAL PUBLIC LICENSE

5
TODO Normal file
View File

@ -0,0 +1,5 @@
* Write an asynchronous server.
* As soon as it is possible to trigger an action twice without
creating another branch (some kind of asynchronous notification,
perhaps), make sure to test the generalized AND-join with that
in xml/patterns/generalized_and_join.xml.

4
VERSION.in Normal file
View File

@ -0,0 +1,4 @@
"""
Warning: This file is automatically generated.
"""
__version__ = '@VERSION@'

1
doc/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
_build

221
doc/Makefile Normal file
View File

@ -0,0 +1,221 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " apidoc to build in the api documentation"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: apidoc
apidoc:
sphinx-apidoc -d5 -Mefo . ../SpiffWorkflow
.PHONY: html
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/SpiffWorkflow.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/SpiffWorkflow.qhc"
# Build an Apple Help Book (macOS only; requires the Apple help indexer).
.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."
# Build Devhelp (GNOME developer help browser) output.
.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/SpiffWorkflow"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/SpiffWorkflow"
	@echo "# devhelp"
# Build an EPUB e-book.
.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
# Generate LaTeX sources (does not run LaTeX itself).
.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."
# Generate LaTeX sources and run them through pdflatex to produce a PDF.
.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
# Same as latexpdf, but via platex/dvipdfmx (Japanese document support).
.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
# Build plain-text output.
.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."
# Build Unix manual pages.
.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
# Generate Texinfo sources (does not run makeinfo itself).
.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."
# Generate Texinfo sources and run makeinfo to produce Info files.
.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
# Extract translatable messages into gettext catalogs (.pot files).
.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
# Produce an overview of all changed/added/deprecated items in the docs.
.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."
# Check the integrity of all external links in the documentation.
.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."
# Run all doctest snippets embedded in the documentation.
.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
# Report documentation coverage of the Python sources.
.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."
# Build Docutils-native XML output.
.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
# Build pseudo-XML output (XML with indentation, for debugging the doctree).
.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

25
doc/bpmn/Makefile Normal file
View File

@ -0,0 +1,25 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Regenerate API stub pages (-d5: max TOC depth 5; -M: modules before
# submodules; -e: one page per module; -f: overwrite; -o .: output here).
# NOTE(review): the hard-coded ../venv/lib/python3.7 path assumes a local
# Python 3.7 virtualenv layout — confirm before relying on this target.
.PHONY: apidoc
apidoc:
	sphinx-apidoc -d5 -Mefo . ../venv/lib/python3.7/site-packages/SpiffWorkflow
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

423
doc/bpmn/advanced.rst Normal file
View File

@ -0,0 +1,423 @@
A More In-Depth Look at Some of SpiffWorkflow's Features
========================================================
Displaying Workflow State
-------------------------
Filtering Tasks
^^^^^^^^^^^^^^^
In our earlier example, all we did was check the lane a task was in and display
it along with the task name and state.
Lets take a look at a sample workflow with lanes:
.. figure:: figures/lanes.png
:scale: 30%
:align: center
Workflow with lanes
To get all of the tasks that are ready for the 'Customer' workflow, we could
specify the lane when retrieving ready user tasks:
.. code:: python
ready_tasks = workflow.get_ready_user_tasks(lane='Customer')
If there were no tasks ready for the 'Customer' lane, you would get an empty list,
and of course if you had no lane that was labeled 'Customer' you would *always* get an
empty list.
We can also get a list of tasks by state.
We need to import the :code:`Task` object (unless you want to memorize which numbers
correspond to which states).
.. code:: python
from SpiffWorkflow.task import Task
To get a list of completed tasks
.. code:: python
tasks = workflow.get_tasks(Task.COMPLETED)
The tasks themselves are not particularly intuitive to work with. So SpiffWorkflow
provides some facilities for obtaining a more user-friendly version of upcoming tasks.
Nav(igation) List
^^^^^^^^^^^^^^^^^
In order to get the navigation list, we can call the workflow.get_nav_list() function. This
will return a list of dictionaries with information about each task and decision point in the
workflow. Each item in this list returns some information about the tasks that are in the workflow,
and how it relates to the other tasks.
To give you an idea of what is in the list I'll include a segment from the documentation::
id - TaskSpec or Sequence flow id
task_id - The uuid of the actual task instance, if it exists.
name - The name of the task spec (or sequence)
description - Text description
backtracks - Boolean, if this backtracks back up the list or not
level - Depth in the tree - probably not needed
indent - A hint for indentation
child_count - The number of children that should be associated with
this item.
lane - This is the swimlane for the task if indicated.
state - Text based state (may be half baked in the case that we have
more than one state for a task spec - but I don't think those
are being reported in the list, so it may not matter)
Any task with a blank or None as the description is excluded from the list (i.e. gateways)
Because the output from this list may be used in a variety of contexts, the implementation is left to the user.
MultiInstance Notes
-------------------
**loopCardinality** - This variable can be a text representation of a
number - for example '2' or it can be the name of a variable in
task.data that resolves to a text representation of a number.
It can also be a collection such as a list or a dictionary. In the
case that it is a list, the loop cardinality is equal to the length of
the list and in the case of a dictionary, it is equal to the list of
the keys of the dictionary.
If loopCardinality is left blank and the Collection is defined, or if
loopCardinality and Collection are the same collection, then the
MultiInstance will loop over the collection and update each element of
that collection with the new information. In this case, it is assumed
that the incoming collection is a dictionary, currently behavior for
working with a list in this manner is not defined and will raise an error.
**Collection** This is the name of the collection that is created from
the data generated when the task is run. Examples of this would be
form data that is generated from a UserTask or data that is generated
from a script that is run. Currently the collection is built up to be
a dictionary with a numeric key that corresponds to the place in the
loopCardinality. For example, if we set the loopCardinality to be a
list such as ['a','b','c'] the resulting collection would be {1:'result
from a',2:'result from b',3:'result from c'} - and this would be true
even if it is a parallel MultiInstance where it was filled out in a
different order.
**Element Variable** This is the variable name for the current
iteration of the MultiInstance. In the case of the loopCardinality
being just a number, this would be 1,2,3, . . . If the
loopCardinality variable is mapped to a collection it would be either
the list value from that position, or it would be the value from the
dictionary where the keys are in sorted order. It is the content of the
element variable that should be updated in the task.data. This content
will then be added to the collection each time the task is completed.
Example:
In a sequential MultiInstance, loop cardinality is ['a','b','c'] and elementVariable is 'myvar'
then in the case of a sequential multiinstance the first call would
have 'myvar':'a' in the first run of the task and 'myvar':'b' in the
second.
Example:
In a Parallel MultiInstance, Loop cardinality is a variable that contains
{'a':'A','b':'B','c':'C'} and elementVariable is 'myvar' - when the multiinstance is ready, there
will be 3 tasks. If we choose the second task, the task.data will
contain 'myvar':'B'.
Custom Script Engines
---------------------
You may need to modify the default script engine, whether because you need to make additional
functionality available to it, or because you might want to restrict its capabilities for
security reasons.
.. warning::
The default script engine does little to no sanitization and uses :code:`eval`
and :code:`exec`! If you have security concerns, you should definitely investigate
replacing the default with your own implementation.
The default script engine imports the following objects:
- :code:`timedelta`
- :code:`datetime`
- :code:`dateparser`
- :code:`pytz`
You could add other functions or classes from the standard python modules or any code you've
implemented yourself.
In our example models so far, we've been using DMN tables to obtain product information. DMN
tables have a **lot** of uses so we wanted to feature them prominently, but in a simple way.
If a customer was selecting a product, we would surely have information about how the product
could be customized in a database somewhere. We would not hard code product information in
our diagram (although it is much easier to modify the BPMN diagram than to change the code
itself!). Our shipping costs would not be static, but would depend on the size of the order and
where it was being shipped -- maybe we'd query an API provided by our shipper.
SpiffWorkflow is obviously **not** going to know how to make a call to **your** database or
make API calls to **your** vendors. However, you can implement the calls yourself and make them
available as a method that can be used within a script task.
We are not going to actually include a database or API and write code for connecting to and querying
it, but we can model our database with a simple dictionary lookup since we only have 7 products
and just return the same static info for shipping for the purposes of the tutorial.
.. code:: python
from collections import namedtuple
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
ProductInfo = namedtuple('ProductInfo', ['color', 'size', 'style', 'price'])
INVENTORY = {
'product_a': ProductInfo(False, False, False, 15.00),
'product_b': ProductInfo(False, False, False, 15.00),
'product_c': ProductInfo(True, False, False, 25.00),
'product_d': ProductInfo(True, True, False, 20.00),
'product_e': ProductInfo(True, True, True, 25.00),
'product_f': ProductInfo(True, True, True, 30.00),
'product_g': ProductInfo(False, False, True, 25.00),
}
def lookup_product_info(product_name):
return INVENTORY[product_name]
def lookup_shipping_cost(shipping_method):
return 25.00 if shipping_method == 'Overnight' else 5.00
additions = {
'lookup_product_info': lookup_product_info,
'lookup_shipping_cost': lookup_shipping_cost
}
CustomScriptEngine = PythonScriptEngine(scriptingAdditions=additions)
We pass the script engine we created to the workflow when we load it.
.. code:: python
return BpmnWorkflow(parser.get_spec(process), script_engine=CustomScriptEngine)
We can use the custom functions in script tasks like any normal function:
.. figure:: figures/custom_script_usage.png
:scale: 30%
:align: center
Workflow with lanes
And we can simplify our 'Call Activity' flows:
.. figure:: figures/call_activity_script_flow.png
:scale: 30%
:align: center
Workflow with lanes
To run this workflow:
.. code-block:: console
./run.py -p order_product -b bpmn/call_activity_script.bpmn bpmn/top_level_script.bpmn
We have also done some work using `Restricted Python <https://restrictedpython.readthedocs.io/en/latest/>`_
to provide more secure alternatives to standard python functions.
Serialization
-------------
.. warning::
Serialization Changed in Version 1.1.7. Support for pre-1.1.7 serialization will be dropped in 1.2.
The old serialization method still works but it is deprecated.
To migrate your system to the new version, see "Migrating between
serialization versions" below.
So far, we've only considered the context where we will run the workflow from beginning to end in one
setting. This may not always be the case, we may be executing the workflow in the context of a web server where we
may have a user request a web page where we open a specific workflow that we may be in the middle of, do one step of
that workflow and then the user may be back in a few minutes, or maybe a few hours depending on the application.
To accomplish this, we can import the serializer
.. code:: python
from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
This class contains a serializer for a workflow containing only standard BPMN Tasks. Since we are using custom task
classes (the Camunda :code:`UserTask` and the DMN :code:`BusinessRuleTask`), we'll need to import serializers for those
task specs as well.
.. code:: python
from SpiffWorkflow.camunda.serializer import UserTaskConverter
from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter
Strictly speaking, these are not serializers per se: they actually convert the tasks into dictionaries of
JSON-serializable objects. Conversion to JSON is done only as the last step and could easily be replaced with some
other output format.
We'll need to configure a Workflow Spec Converter with our custom classes:
.. code:: python
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
[ UserTaskConverter, BusinessRuleTaskConverter ])
We create a serializer that can handle our extended task specs:
.. code:: python
serializer = BpmnWorkflowSerializer(wf_spec_converter)
We'll give the user the option of dumping the workflow at any time.
.. code:: python
filename = input('Enter filename: ')
state = serializer.serialize_json(workflow)
with open(filename, 'w') as dump:
dump.write(state)
We'll ask them for a filename and use the serializer to dump the state to that file.
To restore the workflow:
.. code:: python
if args.restore is not None:
with open(args.restore) as state:
wf = serializer.deserialize_json(state.read())
The workflow serializer is designed to be flexible and modular and as such is a little complicated. It has
two components:
- a workflow spec converter (which handles workflow and task specs)
- a data converter (which handles workflow and task data).
The default workflow spec converter is likely to meet your needs, either on its own, or with the inclusion of
:code:`UserTask` and :code:`BusinessRuleTask` in the :code:`camunda` and :code:`dmn` subpackages of this
library, and all you'll need to do is add them to the list of task converters, as we did above.
However, the default data converter is very simple, adding only JSON-serializable conversions of :code:`datetime`
and :code:`timedelta` objects (we make these available in our default script engine) and UUIDs. If your
workflow or task data contains objects that are not JSON-serializable, you'll need to extend ours, or extend
its base class to create one of your own.
To extend ours:
1. Subclass the base data converter
2. Register classes along with functions for converting them to and from dictionaries
.. code:: python
from SpiffWorkflow.bpmn.serializer.dictionary import DictionaryConverter
class MyDataConverter(DictionaryConverter):
def __init__(self):
super().__init__()
self.register(MyClass, self.my_class_to_dict, self.my_class_from_dict)
def my_class_to_dict(self, obj):
return obj.__dict__
def my_class_from_dict(self, dct):
return MyClass(**dct)
More information can be found in the class documentation for the
`default converter <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/bpmn_converters.py>`_
and its `base class <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/dictionary.py>`_
.
You can also replace ours entirely with one of your own. If you do so, you'll need to implement `convert` and
`restore` methods. The former should return a JSON-serializable representation of your workflow data; the
latter should recreate your data from the serialization.
If you have written any custom task specs, you'll need to implement task spec converters for those as well.
Task Spec converters are also based on the :code:`DictionaryConverter`. You should be able to use the
`BpmnTaskSpecConverter <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/bpmn_converters.py>`_
as a basis for your custom specs. It provides some methods for extracting attributes from Spiff base classes as well as
standard BPMN attributes from tasks that inherit from :code:`BpmnSpecMixin`.
The `Camunda User Task Converter <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/camunda/serializer/task_spec_converters.py>`_
should provide a simple example of how you might create such a converter.
Migrating Between Serialization Versions
----------------------------------------
Old (Non-Versioned) Serializer
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prior to Spiff 1.1.7, the serialized output did not contain a version number.
.. code:: python
old_serializer = BpmnSerializer() # the deprecated serializer.
# new serializer, which can be customized as described above.
serializer = BpmnWorkflowSerializer(version="MY_APP_V_1.0")
The new serializer has a :code:`get_version` method that will read the version
back out of the serialized json. If the version isn't found, it will return
:code:`None`, and you can then assume it is using the old style serializer.
.. code:: python
version = serializer.get_version(some_json)
if version == "MY_APP_V_1.0":
workflow = serializer.deserialize_json(some_json)
else:
workflow = old_serializer.deserialize_workflow(some_json, workflow_spec=spec)
If you are not using any custom tasks and do not require custom serialization, then you'll be able to
serialize the workflow in the new format:
.. code:: python
new_json = serializer.serialize_json(workflow)
However, if you use custom tasks or data serialization, you'll also need to specify workflow spec or data
serializers, as in the examples in the previous section, before you'll be able to serialize with the new serializer.
The code would then look more like this:
.. code:: python
from SpiffWorkflow.camunda.serializer import UserTaskConverter
old_serializer = BpmnSerializer() # the deprecated serializer.
# new serializer, with customizations
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([UserTaskConverter])
data_converter = MyDataConverter
serializer = BpmnWorkflowSerializer(wf_spec_converter, data_converter, version="MY_APP_V_1.0")
version = serializer.get_version(some_json)
if version == "MY_APP_V_1.0":
workflow = serializer.deserialize_json(some_json)
else:
workflow = old_serializer.deserialize_workflow(some_json, workflow_spec=spec)
new_json = serializer.serialize_json(workflow)
Because the serializer is highly customizable, we've made it possible for you to manage your own versions of the
serialization. You can do this by passing a version number into the serializer, which will be embedded in the
json of all workflows. This allows you to modify the serialization and customize it over time, and still manage
the different forms as you make adjustments without leaving people behind.
Versioned Serializer
^^^^^^^^^^^^^^^^^^^^
As we make changes to Spiff, we may change the serialization format. For example, in 1.1.8, we changed
how subprocesses were handled internally in BPMN workflows and updated how they are serialized. If you have
not overridden our version number with one of your own, the serializer will transform the 1.0 format to the
new 1.1 format.
If you've overridden the serializer version, you may need to incorporate our serialization changes with
your own. You can find our conversions in
`version_migrations.py <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/version_migration.py>`_

60
doc/bpmn/conf.py Executable file
View File

@ -0,0 +1,60 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# Number figures, tables and code blocks so they can be cross-referenced
# with :numref:.
numfig = True
# -- Project information -----------------------------------------------------
project = 'SpiffWorkflow-BPMN Documentation'
copyright = '2020, Sartography'
author = 'Sartography'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', # 'sphinx.ext.coverage',
              'sphinx.ext.viewcode',
              'sphinx.ext.autosummary',
              #'sphinx.ext.intersphinx',
              ]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'default'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Cross-reference targets for the Python standard library documentation.
# Only used if 'sphinx.ext.intersphinx' is enabled in `extensions` above
# (currently commented out). Uses the named-key form required by modern
# Sphinx; the old bare-URL form ({'<url>': None}) is deprecated.
intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}

260
doc/bpmn/events.rst Normal file
View File

@ -0,0 +1,260 @@
Events
======
BPMN Model
----------
We'll be using the following files from `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_.
- `transaction <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/transaction.bpmn>`_ workflow
- `signal_event <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/signal_event.bpmn>`_ workflow
- `events <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/events.bpmn>`_ workflow
- `call activity <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/call_activity.bpmn>`_ workflow
- `product_prices <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/product_prices.dmn>`_ DMN table
- `shipping_costs <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/shipping_costs.dmn>`_ DMN table
A general overview of events in BPMN can be found in the :doc:`/intro`
section of the documentation.
SpiffWorkflow supports the following Event Definitions:
- `Cancel Events`_
- `Signal Events`_
- `Terminate Events`_
- `Error Events`_
- `Escalation Events`_
- `Timer Events`_
- `Message Events`_
We'll include examples of all of these types in this section.
Transactions
^^^^^^^^^^^^
We also need to introduce the concept of a Transaction, because certain events
can only be used in that context. A Transaction is essentially a subprocess, but
it must fully complete before it affects its outer workflow.
We'll make our customer's ordering process through the point they review their order
into a Transaction. If they do not complete their order, then product selections and
customizations will be discarded; if they place the order, the workflow will proceed
as before.
We'll also introduce our first event type, the Cancel Event. Cancel Events can
only be used in Transactions.
Cancel Events
^^^^^^^^^^^^^
.. figure:: figures/transaction.png
:scale: 30%
:align: center
Workflow with a transaction and Cancel Event
We changed our 'Review Order' Task to be a User Task and have added a form, so
that we can give the customer the option of cancelling the order. If the customer
answers 'Y', then the workflow ends normally and we proceed to collecting
payment information.
However, if the user elects to cancel their order, we use a 'Cancel End Event'
instead, which generates a Cancel Event. We can then attach a 'Cancel Boundary
Event' to the Transaction, and execute that path if the event occurs. Instead of
asking the customer for their payment info, we'll direct them to a form and ask
them why they cancelled their order.
If the order is placed, the workflow will contain the order data; if it is
cancelled, it will contain the reason for cancellation instead.
To run this workflow
.. code-block:: console
./run.py -p order_product \
-d bpmn/product_prices.dmn bpmn/shipping_costs.dmn \
-b bpmn/transaction.bpmn bpmn/call_activity.bpmn
Signal Events
^^^^^^^^^^^^^
.. figure:: figures/signal_event.png
:scale: 30%
:align: center
Workflow with Signal Events
Suppose we also want to give our customer the ability to cancel their order at
any time up until they are charged. We need to throw an event after the charge
is placed and catch this event before the user completes the 'Cancel Order' task.
Once the charge is placed, the task that provides the option to cancel will
itself be cancelled when the charge event is received.
We'll also need to detect the case that the customer cancels their order and
cancel the charge task if it occurs; we'll use a separate signal for that.
Multiple tasks can catch the same signal event. Suppose we add a Manager role
to our workflow, and allow the Employee to refer unsuccessful charges to the
Manager for resolution. The Manager's task will also need to catch the 'Order
Cancelled' signal event.
Signals are referred to by name.
.. figure:: figures/throw_signal_event.png
:scale: 30%
:align: center
Signal Event configuration
.. Terminate Events:
Terminate Events
^^^^^^^^^^^^^^^^
We also added a Terminate Event to the Manager Workflow. A regular End Event
simply marks the end of a path. A Terminate Event will indicate that the
entire workflow is complete and any remaining tasks should be cancelled. Our
customer cannot cancel an order that has already been cancelled, and we won't ask
them for feedback about it (we know it wasn't completed), so we do not want to
execute either of those tasks.
We'll now modify our workflow to add an example of each of the other types of
events that SpiffWorkflow Supports.
To run this workflow
.. code-block:: console
./run.py -p order_product \
-d bpmn/product_prices.dmn bpmn/shipping_costs.dmn \
-b bpmn/signal_event.bpmn bpmn/call_activity.bpmn
Error Events
^^^^^^^^^^^^
Let's turn to our order fulfillment subprocess. Either of these steps could
potentially fail, and we may want to handle each case differently.
.. figure:: figures/events.png
:scale: 30%
:align: center
Workflow with multiple event types
One potential failure is that our product is unavailable. This actually might be
a temporary problem, but we'll assume that it is a show stopper for the sake of
this tutorial.
We ask the Employee to verify that they were able to retrieve the product; if they
were unable to do so, then we generate an Error End Event, which we will handle
with an Interrupting Error Boundary Event (Error events are *always* Interrupting).
If the product is unavailable, our Manager will notify the customer, issue a refund,
and cancel the order.
Escalation Events
^^^^^^^^^^^^^^^^^
Escalation events are a lot like Error Events and as far as I can tell, which one
to use comes down to preference, with the caveat that if you want to use an Intermediate
Event, you'll have to use Escalation, because BPMN does not allow Intermediate Error Events,
and that Error Events cannot be Non-Interrupting.
In our example, we'll assume that if we failed to ship the product, we can try again later,
so we will not end the Subprocess (Escalation events can be either Interrupting or
Non-Interrupting).
However, we still want to notify our customer of a delay, so we use a Non-Interrupting
Escalation Boundary Event.
Both Error and Escalation Events can be optionally associated with a code. Here is
Throw Event for our `product_not_shipped` Escalation.
.. figure:: figures/throw_escalation_event.png
:scale: 30%
:align: center
Throw Escalation Event configuration
Error Event configuration is similar.
If no code is provided in a Catch event, any event of the corresponding type will catch
the event.
Timer Events
^^^^^^^^^^^^
In the previous section, we mentioned that we would try again later if we were unable
to ship the order. We can use a Duration Timer Event to force our workflow to wait a certain
amount of time before continuing. We can use this as a regular Intermediate Event (in
'Try Again Later') or a Boundary Event. Timer Boundary Events can be Interrupting, but in
this case, we simply want to notify the customer of the delay while continuing to process
their order, so we use a Non-Interrupting Event.
.. figure:: figures/timer_event.png
:scale: 30%
:align: center
Duration Timer Event configuration
We express the duration as a Python :code:`timedelta`. We show the configuration for the Boundary
Event.
It is also possible to use a static datetime to trigger an event. It will need to be parseable
as a date by Python.
Timer events can only be caught, that is waited on. The timer begins implicitly when we
reach the event.
Message Events
^^^^^^^^^^^^^^
.. sidebar:: QA Lane
Ideally, this lane would be a process independent from the ordering process (we don't want
it to be cancelled just because an order eventually completes). However, limitations of how
SpiffWorkflow handles processes precludes multiple top-level processes.
In BPMN, Messages are used to communicate across processes and cannot be used within a
workflow, but SpiffWorkflow allows message communication between lanes as well as between
parent and child workflows. We'll use the first scenario in our example.
We've added a QA lane to our ordering process, whose job is investigating order delays
and recommending improvements. This portion of our process will only be started when an
appropriate message is received.
Messages are similar to signals, in that they are referenced by name, but they have the
additional property that they may contain a payload.
.. note::
We currently depend on some Camunda-specific features in our implementation, but we
intend to replace this with our own.
.. figure:: figures/throw_message_event.png
:scale: 30%
:align: center
Throw Message Event configuration
The Throw Message Event Implementation should be 'Expression' and the Expression should
be a Python statement that can be evaluated. In this example, we'll just send the contents
of the :code:`reason_delayed` variable, which contains the response from the 'Investigate Delay'
Task.
We can provide a name for the result variable, but I have not done that here, as it does not
make sense to me for the generator of the event to tell the handler what to call the value.
If you *do* specify a result variable, the message payload (the expression evaluated in the
context of the Throwing task) will be added to the handling task's data in a variable of that
name; if you leave it blank, SpiffWorkflow will create a variable of the form <Handling
Task Name>_Response.
Running The Model
^^^^^^^^^^^^^^^^^
.. code-block:: console
./run.py -p order_product \
-d bpmn/product_prices.dmn bpmn/shipping_costs.dmn \
-b bpmn/events.bpmn bpmn/call_activity.bpmn

Binary file not shown.

After

Width:  |  Height:  |  Size: 97 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 133 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 158 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 95 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 45 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 132 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 65 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 92 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 67 KiB

BIN
doc/bpmn/figures/events.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 306 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 94 KiB

BIN
doc/bpmn/figures/lanes.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 166 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 70 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 98 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 96 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 172 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 42 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 137 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 144 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 109 KiB

73
doc/bpmn/gateways.rst Normal file
View File

@ -0,0 +1,73 @@
Gateways
========
BPMN Model
----------
In this section, we'll expand our model by creating alternate paths through the
workflow depending on the current workflow state, in this case, answers provided
by the user through forms.
We've also added a second DMN table to find the cost of the selected shipping
method, and we updated our order total calculations to incorporate that cost.
We'll be using the following files from `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_.
- `gateway_types <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/gateway_types.bpmn>`_ workflow
- `product_prices <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/product_prices.dmn>`_ DMN table
- `shipping_costs <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/shipping_costs.dmn>`_ DMN table
Exclusive Gateway
^^^^^^^^^^^^^^^^^
Exclusive gateways are used when exactly one alternative can be selected.
Suppose our products are T-shirts and we offer product C in several colors. After
the user selects a product, we check to see if it is customizable. Our default
branch will be 'Not Customizable', but we'll direct the user to a second form
if they select 'C'; our condition for choosing this branch is a simple python
expression.
.. figure:: figures/exclusive_gateway.png
:scale: 30%
:align: center
Flow configuration
Parallel Gateway
^^^^^^^^^^^^^^^^
.. sidebar:: IDs vs Names
We've assigned descriptive names to all our tasks so far. Text added to
the Name field will appear in the diagram, so sometimes it's better to
leave it blank to avoid visual clutter. I've put a description of the
gateway into the ID field instead.
Parallel gateways are used when the subsequent tasks do not need to be completed
in any particular order. The user can complete them in any sequence and the
workflow will wait for all tasks to be finished before advancing.
We do not care whether the user chooses a shipping method or enters their
address first, but they'll need to complete both tasks before continuing.
We don't need to do any particular configuration for this gateway type.
.. figure:: figures/parallel_gateway.png
:scale: 30%
:align: center
Parallel Gateway example
Running The Model
^^^^^^^^^^^^^^^^^
If you have set up our example repository, this model can be run with the
following command:
.. code-block:: console
./run.py -p order_product \
-d bpmn/product_prices.dmn bpmn/shipping_costs.dmn \
-b bpmn/gateway_types.bpmn

77
doc/bpmn/index.rst Normal file
View File

@ -0,0 +1,77 @@
BPMN Workflows
==============
The basic idea of SpiffWorkflow is that you can use it to write an interpreter
in Python that creates business applications from BPMN models. In this section,
we'll develop a model of an example process as well as a
simple workflow runner.
We expect that readers will fall into two general categories:
- People with a background in BPMN who might not be very familiar with Python
- Python developers who might not know much about BPMN
This section of the documentation provides an example that (hopefully) serves
the needs of both groups. We will introduce the BPMN elements that SpiffWorkflow
supports and show how to build a simple workflow runner around them.
SpiffWorkflow does heavy-lifting such as keeping track of task dependencies and
states and providing the ability to serialize or deserialize a workflow that
has not been completed. The developer will write code for displaying workflow
state and presenting tasks to users of their application.
All the Python code and BPMN models used here are available in an example
project called `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_.
Quickstart
----------
Check out the code in `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_
and follow the instructions to set up an environment to run it in.
Run the sample workflow we built up using our example application with the following
command:
.. code-block:: console
./run.py -p order_product \
-d bpmn/{product_prices,shipping_costs}.dmn \
-b bpmn/{multiinstance,call_activity_multi}.bpmn
For a full description of program options:
.. code-block:: console
./run.py --help
The code in the workflow runner and the models in the bpmn directory of the
repository will be discussed in the remainder of this tutorial.
Supported BPMN Elements
-----------------------
.. toctree::
:maxdepth: 3
tasks
gateways
organization
events
multiinstance
Putting it All Together
-----------------------
.. toctree::
:maxdepth: 2
synthesis
Features in More Depth
----------------------
.. toctree::
:maxdepth: 2
advanced

35
doc/bpmn/make.bat Normal file
View File

@ -0,0 +1,35 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

112
doc/bpmn/multiinstance.rst Normal file
View File

@ -0,0 +1,112 @@
MultiInstance Tasks
===================
BPMN Model
----------
We'll be using the following files from `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_.
- `multiinstance <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/multiinstance.bpmn>`_ workflow
- `call activity multi <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/call_activity_multi.bpmn>`_ workflow
- `product_prices <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/product_prices.dmn>`_ DMN table
- `shipping_costs <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/shipping_costs.dmn>`_ DMN table
Suppose we want our customer to be able to select more than one product.
If we knew how many products they would select at the beginning of the workflow, we could
configure 'Select and Customize Product' as a Sequential MultiInstance Task. We would
specify the name of the collection and each iteration of the task would add a new item
to it.
Since we can't know in advance how many products the order will contain, we'll need to modify that
workflow to ask them whether they want to continue shopping and maintain their product
selections in a collection.
.. figure:: figures/call_activity_multi.png
:scale: 30%
:align: center
Selecting more than one product
We'll also need to update our element documentation to display all products.
.. figure:: figures/documentation_multi.png
:scale: 30%
:align: center
Updated Documentation for 'Review Order'
.. note::
Note that we are using a dot instead of the typical python dictionary access to obtain
the values. Spiff automatically generates such a representation, which simplifies creating the
documentation strings; however regular Python syntax will work as well.
Parallel MultiInstance
^^^^^^^^^^^^^^^^^^^^^^
We'll also update our 'Retrieve Product' task and 'Product Not Available' flows to
accommodate multiple products. We can use a Parallel MultiInstance for this, since
it does not matter what order our Employee retrieves the products in.
.. figure:: figures/multiinstance_task_configuration.png
:scale: 30%
:align: center
MultiInstance task configuration
Spiff will generate a task for each of the items in the collection. Because of the way
SpiffWorkflow manages the data for these tasks, the collection MUST be a dictionary.
Each value in the dictionary will be copied into a variable with the name specified in
the 'Element Variable' field, so you'll need to specify this as well.
.. figure:: figures/multiinstance_form_configuration.png
:scale: 30%
:align: center
MultiInstance form configuration
We'll also need to update the form field id so that the results will be added to the
item of the collection rather than the top level of the task data. This is where the
'Element Variable' field comes in: we'll need to change `product_available` to
`product.product_available`, because we set up `product` as our reference to the
current item.
.. figure:: figures/multiinstance_flow_configuration.png
:scale: 30%
:align: center
Product available flow configuration
Finally, we'll need to update our 'No' flow to check all items in the collection for
availability.
.. note::
In our form configuration, we used `product.product_available` but when we reference
it in the flow, we use the standard python dictionary syntax. We can't use that
notation in form fields, so in this case we need to use SpiffWorkflow's dot notation
conversion.
Sequential MultiInstance
^^^^^^^^^^^^^^^^^^^^^^^^
SpiffWorkflow also supports Sequential MultiInstance Tasks for previously defined
collections, or if the loopCardinality is known in advance, although we have not added an
example of this to our workflow.
For more information about MultiInstance Tasks and SpiffWorkflow, see :doc:`/bpmn/advanced`.
Running The Model
^^^^^^^^^^^^^^^^^
If you have set up our example repository, this model can be run with the
following command:
.. code-block:: console
./run.py -p order_product \
-d bpmn/product_prices.dmn bpmn/shipping_costs.dmn \
-b bpmn/multiinstance.bpmn bpmn/call_activity_multi.bpmn

128
doc/bpmn/organization.rst Normal file
View File

@ -0,0 +1,128 @@
Organizing More Complex Workflows
=================================
BPMN Model
----------
We'll be using the following files from `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_.
- `lanes <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/lanes.bpmn>`_ workflow
- `top_level <https://github.com/sartography/spiff-example-cli/bpmn/top_level.bpmn>`_ workflow
- `call_activity <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/call_activity.bpmn>`_ workflow
- `product_prices <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/product_prices.dmn>`_ DMN table
- `shipping_costs <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/shipping_costs.dmn>`_ DMN table
Lanes
^^^^^
Lanes are a method in BPMN to distinguish roles for the workflow and who is
responsible for which actions. In some cases this will be different business
units, and in some cases this will be different individuals - it really depends
on the nature of the workflow. Within a BPMN editor, this is done by choosing the
'Create pool/participant' option from the toolbar on the left hand side.
We'll modify our workflow to get the customer's payment information and send it
to an employee who will charge the customer and fulfill the order.
.. figure:: figures/lanes.png
:scale: 30%
:align: center
Workflow with lanes
To run this workflow
.. code-block:: console
./run.py -p order_product \
-d bpmn/product_prices.dmn bpmn/shipping_costs.dmn \
-b bpmn/lanes.bpmn
For a simple code example of displaying a task's lane, see `Handling Lanes`_
Subprocesses
^^^^^^^^^^^^
In general, subprocesses are a way of grouping work into smaller units. This, in
theory, will help us to re-use sections of business logic, but it will also allow
us to treat groups of work as a unit.
Subprocesses come in two different flavors. In this workflow we see an Expanded
Subprocess. Unfortunately, we can't collapse an expanded subprocess within BPMN.js,
so expanded subprocesses are mainly useful for conceptualizing a group of tasks as
a unit.
It is also possible to refer to external subprocesses via a Call Activity Task. This
allows us to 'call' a separate workflow in a different file by referencing the ID of
the called workflow, which can simplify business logic and make it re-usable.
We'll expand 'Fulfill Order' into sub tasks -- retrieving the product and shipping
the order -- and create an Expanded Subprocess.
We'll also expand our selection of products, adding several new products and the ability
to customize certain products by size and style in addition to color.
.. figure:: figures/dmn_table_updated.png
:scale: 30%
:align: center
Updated Product List
.. note::
I've added what customizations are available for each product in the 'Annotations'
column of the DMN table. This is not actually used by Spiff; it simply provides
the option of documenting the decisions contained in the table.
Since adding gateways for navigating the new options will add a certain amount of
clutter to our diagram, we'll create a separate workflow around selecting and
customizing products and refer to that in our main workflow.
.. figure:: figures/call_activity.png
:scale: 30%
:align: center
Subworkflow for product selection
When configuring the subworkflow, we need to make sure the 'CallActivity Type' of the
parent workflow is 'BPMN' and the 'Called Element' matches the ID we assigned in the
subworkflow.
.. figure:: figures/top_level.png
:scale: 30%
:align: center
Parent workflow
Running the Model
^^^^^^^^^^^^^^^^^
.. code-block:: console
./run.py -p order_product \
-d bpmn/product_prices.dmn bpmn/shipping_costs.dmn \
-b bpmn/top_level.bpmn bpmn/call_activity.bpmn
Example Application Code
------------------------
Handling Lanes
^^^^^^^^^^^^^^
We are not required to handle lanes in our application, but most of the time we'll
probably want a way of filtering on lanes and selectively displaying tasks. In
our sample application, we'll simply display which lane a task belongs to.
.. code:: python
if hasattr(task.task_spec, 'lane') and task.task_spec.lane is not None:
lane = f'[{task.task_spec.lane}]'
else:
lane = ''
The task's lane can be obtained from :code:`task.task_spec.lane`. Not all tasks
will have a :code:`lane` attribute, so we need to check to make sure it exists
before attempting to access it (this is true for many task attributes).
See the Filtering Tasks Section of :doc:`advanced` for more information
about working with lanes in Spiff.

223
doc/bpmn/synthesis.rst Normal file
View File

@ -0,0 +1,223 @@
Putting it All Together
=======================
In this section we'll be discussing the overall structure of the workflow
runner we developed in `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_.
Loading a Workflow
-------------------
We'll need the following imports:
.. code:: python
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from custom_script_engine import CustomScriptEngine
We need to create a parser. We could have imported :code:`BpmnParser`, which
these parsers inherit from, but we need some additional features that the base
parser does not provide.
.. code:: python
class Parser(BpmnDmnParser):
OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
OVERRIDE_PARSER_CLASSES.update(CamundaParser.OVERRIDE_PARSER_CLASSES)
We'll use :code:`BpmnDmnParser` as our base class, because we would like the ability
to use DMN tables in our application. The :code:`BpmnDmnParser` provides a task
parser for Business Rule Tasks, which the underlying :code:`BpmnParser` it inherits from
does not contain.
We also imported the :code:`CamundaParser` so that we can parse some Camunda
specific features we'll use (forms in User Tasks). The :code:`CamundaParser` User
Task parser will override the default parser.
In general, any task parser can be replaced with a custom parser of your
own design if you have a BPMN modeller that produces XML not handled by the
BPMN parsers in SpiffWorkflow.
.. code:: python
def parse(process, bpmn_files, dmn_files):
parser = Parser()
parser.add_bpmn_files(bpmn_files)
if dmn_files:
parser.add_dmn_files(dmn_files)
top_level = parser.get_spec(process)
subprocesses = parser.get_process_specs()
return BpmnWorkflow(top_level, subprocesses, script_engine=CustomScriptEngine)
We create an instance of our previously defined parser, add the BPMN files to it, and
optionally add any DMN files, if they were supplied.
We'll obtain the workflow specification from the parser for the top level process
using :code:`parser.get_spec()`.
We'll get the specs of all the processes that were parsed with :code:`parser.get_process_specs()`
and provide these to the workflow as well. If your entire workflow is contained in your
top-level process, you can omit this argument, but if your workflow contains call activities,
you'll need to include it.
We also provide an enhanced script engine to our workflow. More information about how and
why you might want to do this is covered in :doc:`advanced`. The :code:`script_engine`
argument is optional and the default will be used if none is supplied.
We return a :code:`BpmnWorkflow` based on the specs that uses our custom script engine
to execute script tasks and evaluate expressions.
Running a Workflow
------------------
This is our application's :code:`run()` method.
The :code:`step` argument is a boolean that indicates whether we want the option of seeing
a more detailed representation of the state at each step, which we'll discuss in the
section following this one.
.. code:: python
def run(workflow, step):
workflow.do_engine_steps()
while not workflow.is_completed():
ready_tasks = workflow.get_ready_user_tasks()
options = { }
print()
for idx, task in enumerate(ready_tasks):
option = format_task(task, False)
options[str(idx + 1)] = task
print(f'{idx + 1}. {option}')
selected = None
while selected not in options and selected not in ['', 'D', 'd']:
selected = input('Select task to complete, enter to wait, or D to dump the workflow state: ')
if selected.lower() == 'd':
filename = input('Enter filename: ')
state = BpmnSerializer().serialize_workflow(workflow, include_spec=True)
with open(filename, 'w') as dump:
dump.write(state)
elif selected != '':
next_task = options[selected]
if isinstance(next_task.task_spec, UserTask):
complete_user_task(next_task)
next_task.complete()
elif isinstance(next_task.task_spec, ManualTask):
complete_manual_task(next_task)
next_task.complete()
else:
next_task.complete()
workflow.refresh_waiting_tasks()
workflow.do_engine_steps()
if step:
print_state(workflow)
print('\nWorkflow Data')
print(json.dumps(workflow.data, indent=2, separators=[ ', ', ': ' ]))
The first line of this function is the one that does the bulk of the work in
SpiffWorkflow. Calling :code:`workflow.do_engine_steps()` causes Spiff to repeatedly
look for and execute any engine tasks that are ready.
An **engine task** does not require user interaction. For instance, it could be
a Script task or selection of a flow from a gateway. Execution will
stop when only interactive tasks remain or the workflow is completed.
A SpiffWorkflow application will call :code:`workflow.do_engine_steps()` to start the
workflow and then enter a loop that will
- check for ready user tasks
- present the tasks to the user to complete
- complete the tasks
- refresh any waiting tasks
- complete any engine tasks that have been reached via user interactions
until the workflow completes.
When a workflow completes, the task data (just a dictionary passed from one task to the
next, and optionally modified by each task) is copied into the workflow data. We display
the end state of the workflow on completion.
The rest of the code is all about presenting the tasks to the user and dumping the
workflow state. We've covered the former in the BPMN Elements section of :doc:`index`
and will cover the latter in :doc:`advanced`.
Handling task presentation is what **you** will be developing when you use SpiffWorkflow.
Examining the Workflow State
----------------------------
When this application is run and we want to present steps to the user, we'll need
to be able to examine the workflow and task states and associated data. We'll cover
the basics of this in this section.
The code below is a simple method for displaying information about a task. We use
this in two ways
- presenting a list of tasks to a user (in this case the state will always be ready, so we won't include it)
- presenting the state of each task while stepping through the workflow (in this case you most likely do want to know the state).
.. code:: python
def format_task(task, include_state=True):
if hasattr(task.task_spec, 'lane') and task.task_spec.lane is not None:
lane = f'[{task.task_spec.lane}]'
else:
lane = ''
state = f'[{task.get_state_name()}]' if include_state else ''
return f'{lane} {task.task_spec.description} ({task.task_spec.name}) {state}'
We previously went over obtaining the lane information in :doc:`organization`.
We can call :code:`task.get_state_name()` to get a human-readable representation of
a task's state.
We store the value provided in the :code:`name` attribute of the task (the text
entered in the 'Name' field in our sample models) in :code:`task.task_spec.description`.
Here is the code we use for examining the workflow state.
.. code:: python
def print_state(workflow):
task = workflow.last_task
print('\nLast Task')
print(format_task(task))
print(json.dumps(task.data, indent=2, separators=[ ', ', ': ' ]))
display_types = (UserTask, ManualTask, ScriptTask, ThrowingEvent, CatchingEvent)
all_tasks = [ task for task in workflow.get_tasks() if isinstance(task.task_spec, display_types) ]
upcoming_tasks = [ task for task in all_tasks if task.state in [Task.READY, Task.WAITING] ]
print('\nUpcoming Tasks')
for idx, task in enumerate(upcoming_tasks):
print(format_task(task))
if input('\nShow all tasks? ').lower() == 'y':
for idx, task in enumerate(all_tasks):
print(format_task(task))
We can find out what the last task was with :code:`workflow.last_task`. We'll print
its information as described above, as well as a dump of its data.
We can get a list of all tasks regardless of type or state with :code:`workflow.get_tasks()`.
The actual list of tasks will get quite long (some tasks are expanded internally by Spiff into
multiple tasks, and all gateways and events are also treated as "tasks"). So we're filtering
the tasks to only display the ones that would have salience to a user here.
We'll further filter those tasks for :code:`READY` and :code:`WAITING` tasks for a more
compact display, and only show all tasks when explicitly called for.
This is a very simple application, so our interactions with tasks are very basic. You will
definitely want to see the 'Navigation List' section of :doc:`advanced` for more sophisticated
ways of managing workflow state.

222
doc/bpmn/tasks.rst Normal file
View File

@ -0,0 +1,222 @@
Tasks
=====
BPMN Model
----------
In this example, we'll model a customer selecting a product to illustrate
the basic task types that can be used with SpiffWorkflow.
We'll be using the following files from `spiff-example-cli <https://github.com/sartography/spiff-example-cli>`_.
- `task_types <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/task_types.bpmn>`_ workflow
- `product_prices <https://github.com/sartography/spiff-example-cli/blob/master/bpmn/product_prices.dmn>`_ DMN table
User Tasks
^^^^^^^^^^
User tasks would typically be used in the case where the task would be
completed from within the application.
User tasks can include forms that ask the user questions. When you click on a
user task in a BPMN modeler, the Properties Panel includes a form tab. Use this
tab to build your questions.
We'll ask our hypothetical user to choose a product and quantity.
The following example shows how a form might be set up in Camunda.
.. figure:: figures/user_task.png
:scale: 30%
:align: center
User Task configuration
.. note::
SpiffWorkflow has some basic support for the free Camunda modeler, to use its
form building capabilities, but we intend to encapsulate this support in an
extension module and remove it from the core library eventually.
See the `Handling User Tasks`_ section for a discussion of sample code.
Business Rule Tasks
^^^^^^^^^^^^^^^^^^^
In our business rule task, we'll use a DMN table to look up the price of the
product the user chose.
We'll need to create a DMN table.
What is DMN?
++++++++++++
Decision Model and Notation (DMN) is a standard for business decision
modeling. DMN allows modelers to separate decision logic from process logic
and maintain it in a table format. DMN is linked into BPMN with a *decision
task*.
With DMN, business analysts can model the rules that lead to a decision
in an easy to read table. Those tables can be executed directly by SpiffWorkflow.
This minimizes the risk of misunderstandings between business analysts and
developers, and allows rapid changes in production.
BPMN includes a decision task that refers to the decision table. The outcome of
the decision lookup allows the next gateway or activity to route the flow.
Our Business Rule Task will make use of a DMN table.
.. figure:: figures/dmn_table.png
:scale: 30%
:align: center
DMN Table
.. note::
We add quote marks around the product names in the table. Spiff will
create an expression based on the exact contents of the table, so if
the quotes are omitted, the content will be interpreted as a variable
rather than a string.
Then we'll refer to this table in the task configuration.
.. figure:: figures/business_rule_task.png
:scale: 30%
:align: center
Business Rule Task configuration
Script Tasks
^^^^^^^^^^^^
The total order cost will need to be calculated on the fly. We can do this in
a script task. We'll configure the task with some simple Python code.
.. figure:: figures/script_task.png
:scale: 30%
:align: center
Script Task configuration
The code in the script will have access to the task data, so variables that
have been defined previously will be available to it.
Manual Tasks
^^^^^^^^^^^^
Our final task type is a manual task. We would use this task in the situation
where the application might simply need to mark a task that requires user
involvement complete without gathering any additional information from them.
There is no special configuration for manual tasks. However, this is a good
place to note that we can use the BPMN element Documentation field to display
more information about the context of the item.
Spiff is set up in a way that you could use any templating library you want, but
we have used `Jinja <https://jinja.palletsprojects.com/en/3.0.x/>`_.
In this example, we'll present an order summary to our customer.
.. figure:: figures/documentation.png
:scale: 30%
:align: center
Element Documentation
See the `Handling Manual Tasks`_ section for a discussion of sample code.
Running The Model
^^^^^^^^^^^^^^^^^
If you have set up our example repository, this model can be run with the
following command:
.. code-block:: console
./run.py -p order_product -d bpmn/product_prices.dmn -b bpmn/task_types.bpmn
Example Application Code
------------------------
Handling User Tasks
^^^^^^^^^^^^^^^^^^^
We will need to provide a way to display the form data and collect the user's
responses.
.. code:: python
for field in task.task_spec.form.fields:
if isinstance(field, EnumFormField):
option_map = dict([ (opt.name, opt.id) for opt in field.options ])
options = "(" + ', '.join(option_map) + ")"
prompt = f"{field.label} {options} "
option = select_option(prompt, option_map.keys())
response = option_map[option]
else:
response = input(f"{field.label} ")
if field.type == "long":
response = int(response)
task.update_data_var(field.id, response)
The list of form fields for a task is stored in :code:`task.task_spec.form.fields`.
For Enumerated fields, we want to get the possible options and present them to the
user. The variable names of the fields were stored in :code:`field.id`, but since
we set labels for each of the fields, we'd like to display those instead, and map
the user's selection back to the variable name.
Our :code:`select_option` function simply repeats the prompt until the user
enters a value contained in the option list.
For other fields, we'll just store whatever the user enters, although in the case
where the data type was specified to be a :code:`long`, we'll convert it to a
number.
Finally, we need to explicitly store the user-provided response in a variable
with the expected name with :code:`task.update_data_var(field.id, response)`.
Handling Business Rule Tasks
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We do not need to do any special configuration to handle these business rule
tasks. SpiffWorkflow does it all for us.
Handling Script Tasks
^^^^^^^^^^^^^^^^^^^^^
We do not need to do any special configuration to handle script tasks, although it
is possible to implement a custom script engine. We demonstrate that process in
Custom Script Engines section :doc:`advanced` features. However, the default script
engine will work in many cases.
Handling Manual Tasks
^^^^^^^^^^^^^^^^^^^^^
Our code for manual tasks simply asks the user to confirm that the task has been
completed.
.. code:: python
def complete_manual_task(task):
display_task(task)
input("Press any key to mark task complete")
:code:`display_task()` is the code for converting the Documentation property of the task
into something that can be presented to the user.
.. code:: python
def display_task(task):
print(f'\n{task.task_spec.description}')
if task.task_spec.documentation is not None:
template = Template(task.task_spec.documentation)
print(template.render(task.data))
The template string can be obtained from :code:`task.task_spec.documentation`.
As noted above, our template class comes from Jinja. We render the template
using the task data, which is just a dictionary.

64
doc/conf.py Normal file
View File

@ -0,0 +1,64 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'SpiffWorkflow'
copyright = '2022, Sartography'
author = 'Sartography'
# The full version, including alpha/beta/rc tags
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', # 'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autosummary',
'sphinx_rtd_theme',
#'sphinx.ext.intersphinx',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Set the master index file.
master_doc = 'index'
# Set the fav-icon
html_favicon = 'favicon.ico'

99
doc/development.rst Normal file
View File

@ -0,0 +1,99 @@
SpiffWorkflow Concepts
====================================
Specification vs. Workflow Instance
-----------------------------------
One critical concept to know about SpiffWorkflow is the difference between a
:class:`SpiffWorkflow.specs.WorkflowSpec` and :class:`SpiffWorkflow.Workflow` and
the difference between a :class:`SpiffWorkflow.specs.TaskSpec` and :class:`SpiffWorkflow.Task`.
In order to understand how to handle a running workflow consider the following process::
Choose product -> Choose amount -> Produce product A
`--> Produce product B
As you can see, in this case the process resembles a simple tree. *Choose product*,
*Choose amount*, *Produce product A*, and *Produce product B* are all specific kinds
of *task specifications*, and the whole process is a *workflow specification*.
But when you execute the workflow, the path taken does not necessarily have the same shape. For example, if the user chooses to produce 3 items of product A, the path taken looks like the following::
Choose product -> Choose amount -> Produce product A
|--> Produce product A
`--> Produce product A
This is the reason why you will find two different categories of objects in Spiff Workflow:
- **Specification objects** (WorkflowSpec and TaskSpec) represent the workflow definition, and
- **derivation tree objects** (Workflow and Task) model the task tree that represents the state of a running workflow.
Understanding task states
-------------------------
The following task states exist:
.. image:: figures/state-diagram.png
The states are reached in a strict order and the lines in the diagram show the possible state transitions.
The order of these state transitions is violated only in one case: A *Trigger* task may add additional work to a task that was already COMPLETED, causing it to change the state back to FUTURE.
- **MAYBE** means that the task will possibly, but not necessarily run at a future time. This means that it can not yet be fully determined as to whether or not it may run, for example, because the execution still depends on the outcome of an ExclusiveChoice task in the path that leads towards it.
- **LIKELY** is like MAYBE, except it is considered to have a higher probability of being reached because the path leading towards it is the default choice in an ExclusiveChoice task.
- **FUTURE** means that the processor has predicted that this path will be taken and this task will, at some point, definitely run. (Unless the task is explicitly set to CANCELLED, which can not be predicted.) If a task is waiting on predecessors to run then it is in FUTURE state (not WAITING).
- **WAITING** means *I am in the process of doing my work and have not finished. When the work is finished, then I will be READY for completion and will go to READY state*. WAITING is an optional state.
- **READY** means "the preconditions for marking this task as complete are met".
- **COMPLETED** means that the task is done.
- **CANCELLED** means that the task was explicitly cancelled, for example by a CancelTask operation.
Associating data with a workflow
--------------------------------
The difference between *specification objects* and *derivation tree objects* is also important when choosing how to store data in a workflow. Spiff Workflow supports storing data in two ways:
- **Task spec data** is stored in the TaskSpec object. In other words, if a task causes task spec data to change, that change is reflected to all other instances in the derivation tree that use the TaskSpec object.
- **Task data** is local to the Task object, but is carried along to the children of each Task object in the derivation tree as the workflow progresses.
Internal Details
----------------
A **derivation tree** is created based off of the spec using a hierarchy of
:class:`SpiffWorkflow.Task` objects (not :class:`SpiffWorkflow.specs.TaskSpec` objects!).
Each Task contains a reference to the TaskSpec that generated it.
Think of a derivation tree as tree of execution paths (some, but not all, of
which will end up executing). Each Task object is basically a node in the
derivation tree. Each task in the tree links back to its parent (there are
no connection objects). The processing is done by walking down the
derivation tree one Task at a time and moving the task (and its
children) through the sequence of states towards completion.
You can serialize/deserialize specs. You can also
serialize/deserialize a running workflow (it will pull in its spec as well).
There's a decent eventing model that allows you to tie in to and receive
events (for each task, you can get event notifications from its TaskSpec).
The events correspond with how the processing is going in the derivation
tree, not necessarily how the workflow as a whole is moving.
See :class:`SpiffWorkflow.specs.TaskSpec` for docs on events.
You can nest workflows (using the :class:`SpiffWorkflow.specs.SubWorkflowSpec`).
The serialization code is done well which makes it easy to add new formats
if we need to support them.
Other documentation
-------------------
**API documentation** is currently embedded into the Spiff Workflow source code and not yet made available in a prettier form.
If you need more help, please create an issue in our
`issue tracker <https://github.com/sartography/SpiffWorkflow/issues>`_.

BIN
doc/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 100 KiB

BIN
doc/figures/bpmnbook.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

BIN
doc/figures/classes.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 76 KiB

BIN
doc/figures/events.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 264 KiB

BIN
doc/figures/interaction.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 89 KiB

View File

@ -0,0 +1,71 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1xkruxt" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:collaboration id="Collaboration_1w57zzo">
<bpmn:participant id="Participant_General" name="General" processRef="Process_1vkk3fm" />
<bpmn:participant id="Participant_President" name="President" processRef="Process_1gfhjhz" />
</bpmn:collaboration>
<bpmn:process id="Process_1vkk3fm" isExecutable="true">
<bpmn:startEvent id="StartEvent_1" />
<bpmn:userTask id="Task_1sxhab3" name="Confirm">
<bpmn:outgoing>SequenceFlow_1xrbp0m</bpmn:outgoing>
</bpmn:userTask>
<bpmn:exclusiveGateway id="ExclusiveGateway_1vw2t9k">
<bpmn:incoming>SequenceFlow_1xrbp0m</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1vwfrws</bpmn:outgoing>
<bpmn:outgoing>SequenceFlow_0x0u589</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:endEvent id="EndEvent_0vugqhl">
<bpmn:incoming>SequenceFlow_1vwfrws</bpmn:incoming>
</bpmn:endEvent>
<bpmn:task id="Task_1kt5lea">
<bpmn:incoming>SequenceFlow_0x0u589</bpmn:incoming>
</bpmn:task>
<bpmn:sequenceFlow id="SequenceFlow_1xrbp0m" sourceRef="Task_1sxhab3" targetRef="ExclusiveGateway_1vw2t9k" />
<bpmn:sequenceFlow id="SequenceFlow_1vwfrws" sourceRef="ExclusiveGateway_1vw2t9k" targetRef="EndEvent_0vugqhl" />
<bpmn:sequenceFlow id="SequenceFlow_0x0u589" sourceRef="ExclusiveGateway_1vw2t9k" targetRef="Task_1kt5lea" />
</bpmn:process>
<bpmn:process id="Process_1gfhjhz" isExecutable="false">
<bpmn:task id="Task_1svb9le" name="Confirm" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_1w57zzo">
<bpmndi:BPMNShape id="Participant_1l4ojm7_di" bpmnElement="Participant_General" isHorizontal="true">
<dc:Bounds x="129" y="79" width="600" height="331" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="182" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Participant_0vy6n49_di" bpmnElement="Participant_President" isHorizontal="true">
<dc:Bounds x="129" y="350" width="600" height="250" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_07e6zu6_di" bpmnElement="Task_1sxhab3">
<dc:Bounds x="260" y="160" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1xrbp0m_di" bpmnElement="SequenceFlow_1xrbp0m">
<di:waypoint x="360" y="200" />
<di:waypoint x="415" y="200" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ExclusiveGateway_1thgwy9_di" bpmnElement="ExclusiveGateway_1vw2t9k" isMarkerVisible="true">
<dc:Bounds x="415" y="175" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_0vugqhl_di" bpmnElement="EndEvent_0vugqhl">
<dc:Bounds x="552" y="182" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1vwfrws_di" bpmnElement="SequenceFlow_1vwfrws">
<di:waypoint x="465" y="200" />
<di:waypoint x="552" y="200" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Task_1svb9le_di" bpmnElement="Task_1svb9le">
<dc:Bounds x="390" y="430" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Task_1kt5lea_di" bpmnElement="Task_1kt5lea">
<dc:Bounds x="560" y="270" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0x0u589_di" bpmnElement="SequenceFlow_0x0u589">
<di:waypoint x="440" y="225" />
<di:waypoint x="440" y="310" />
<di:waypoint x="560" y="310" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 165 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 6.7 KiB

BIN
doc/images/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

54
doc/index.rst Normal file
View File

@ -0,0 +1,54 @@
.. image:: https://travis-ci.com/sartography/SpiffWorkflow.svg?branch=master
:target: https://travis-ci.org/sartography/SpiffWorkflow
.. image:: https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=alert_status
:target: https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow
.. image:: https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=coverage
:target: https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow
:alt: Coverage
.. image:: https://img.shields.io/github/stars/sartography/SpiffWorkflow.svg
:target: https://github.com/sartography/SpiffWorkflow/stargazers
.. image:: https://img.shields.io/github/license/sartography/SpiffWorkflow.svg
:target: https://github.com/sartography/SpiffWorkflow/blob/master/COPYING
What is SpiffWorkflow?
======================
.. image:: images/logo.png
:align: center
SpiffWorkflow allows your python application to process BPMN diagrams (think
of them as very powerful flow charts, see :doc:`intro`.) to accomplish
what would otherwise require writing a lot of complex business logic in your
code. You can use these diagrams to accomplish a number of tasks, such as:
- Creating a questionnaire with multiple complex paths
- Implement an approval process that requires input from multiple users
- Allow non-programmers to modify the flow and behavior of your application.
License
-------
Spiff Workflow is published under the terms of the
`GNU Lesser General Public License (LGPL) Version 3 <https://www.gnu.org/licenses/lgpl-3.0.txt>`_.
Support
-------
You can find us on `our Discord Channel <https://discord.gg/zDEBEnrF>`_
Commercial support for SpiffWorkflow is available from
`Sartography <https://sartography.com>`_
Contents
--------
.. toctree::
:maxdepth: 2
intro
bpmn/index
development
non-bpmn/index

125
doc/intro.rst Normal file
View File

@ -0,0 +1,125 @@
Overview
========
BPMN and SpiffWorkflow
----------------------
.. sidebar:: BPMN Resources
This guide is a mere introduction to BPMN.
For more serious modeling, we recommend looking for more comprehensive
resources. We have used the `books by Bruce Silver <https://www.amazon.com/Bruce-Silver/e/B0062AXUFY/ref=dp_byline_cont_pop_book_1>`_
as a guide for our BPMN modeling.
.. image:: figures/bpmnbook.jpg
:align: center
Business Process Model and Notation (BPMN) is a diagramming language for
specifying business processes. BPMN links the realms of business and IT, and
creates a common process language that can be shared between the two.
BPMN describes details of process behaviors efficiently in a diagram. The
meaning is precise enough to describe the technical details that control
process execution in an automation engine. SpiffWorkflow allows you to create
code to directly execute a BPMN diagram.
When using SpiffWorkflow, a client can manipulate the BPMN diagram and still
have their product work without a need for you to edit the Python code,
improving response and turnaround time.
Today, nearly every process modeling tool supports BPMN in some fashion making
it a great tool to learn and use.
To use SpiffWorkflow, you need at least a basic understanding of BPMN.
This page offers a brief overview. There are many resources for additional
information about BPMN.
.. sidebar:: BPMN Modelers
There are a number of modelers in existence, and any BPMN compliant modeler should work.
SpiffWorkflow has some basic support for the free Camunda modeler, to use its form building
capabilities, but we intend to encapsulate this support in an extension module and remove
it from the core library eventually. It does help for making some examples and demonstrating
how one might implement user tasks in an online environment.
In these examples and throughout the documentation we use the
`BPMN.js <https://bpmn.io/toolkit/bpmn-js/>`_ BPMN Modeler.
A Simple Workflow
-----------------
All BPMN models have a start event and at least one end event. The start event
is represented with a single thin border circle. An end event is represented
by a single thick border circle.
The following example also has one task, represented by the rectangle with curved corners.
.. figure:: figures/simplestworkflow.png
:scale: 25%
:align: center
A simple workflow.
The sequence flow is represented with a solid line connector. When the node at
the tail of a sequence flow completes, the node at the arrowhead is enabled to start.
A More Complicated Workflow
---------------------------
.. figure:: figures/ExclusiveGateway.png
:scale: 25%
:align: center
A workflow with a gateway
In this example, the diamond shape is called a gateway. It represents a branch
point in our flow. This gateway is an exclusive data-based gateway (also
called an XOR gateway). With an exclusive gateway, you must take one path or
the other based on some data condition. BPMN has other gateway types.
The important point is that we can use a gateway to add a branch in the
workflow **without** creating an explicit branch in our Python code.
Events
------
In the above simple workflows, all of the transitions are deterministic and we
have direct connections between tasks. We need to handle the cases where an event
may or may not happen and link these events in different parts of the workflow.
BPMN has a comprehensive suite of event elements that can be used for this purpose.
SpiffWorkflow does not support every single BPMN event type, but it can handle
many of them.
.. figure:: figures/events.png
:scale: 25%
:align: center
A workflow containing events
We've already seen plain Start and End Events. BPMN also include the concepts
of Intermediate Events (standalone events that may be Throwing or Catching) as well
as Boundary Events (which can only be Caught).
All Start Events are inherently Catching Events (a workflow can be initiated if a
particular event occurs) and all End Events are Throwing Events (they can convey
the final state of a workflow or path to other tasks and workflows).
If an Intermediate Throwing Event is added to a flow, the event it represents
will be generated and the flow will continue immediately. If an Intermediate
Catching Event is added to a flow, the workflow will wait to catch the event it
represents before advancing.
A Boundary Event represents an event that may be caught only while a particular task
is being executed and comes in two types: Interrupting (in which case the task it is
attached to will be cancelled if the event is received) or Non-Interrupting (in
which case the task will continue). In both cases, flows may emanate from the
Boundary Event, which will trigger those paths if the events occur while the task
is being executed.

35
doc/make.bat Normal file
View File

@ -0,0 +1,35 @@
@ECHO OFF

REM Build script for the Sphinx documentation on Windows.
REM Usage: make.bat <target>   (e.g. "make.bat html" or "make.bat help")

pushd %~dp0

REM Command file for Sphinx documentation

REM Fall back to the "sphinx-build" on PATH unless the caller set SPHINXBUILD.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

REM With no target argument, just print Sphinx's help screen.
if "%1" == "" goto help

REM Probe that sphinx-build can run; errorlevel 9009 means "command not found".
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

REM Delegate the requested target to sphinx-build's "make mode".
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

View File

@ -0,0 +1,65 @@
Implementing Custom Tasks
=========================
Introduction
------------
In this second tutorial we are going to implement our own task, and
use serialization and deserialization to store and restore it.
If you haven't already, you should complete the first
:doc:`../tutorial/index`.
We are also assuming that you are familiar with the :doc:`../basics`.
Implementing the custom task
----------------------------
The first step is to create a :class:`SpiffWorkflow.specs.TaskSpec` that
fires the rocket::
from SpiffWorkflow.specs import Simple
class NuclearStrike(Simple):
def _on_complete_hook(self, my_task):
print("Rocket sent!")
Save this file as ``strike.py``.
Now, before we are ready to define the workflow using XML or JSON, we will
also have to extend the serializer to let SpiffWorkflow know how to represent
your NuclearStrike first.
Preparing a serializer
----------------------
Before we can use JSON to specify a workflow, we first need to teach
SpiffWorkflow what our custom `NuclearStrike` looks like in JSON.
We do this by extending the
:mod:`SpiffWorkflow.serializer.json.JSONSerializer`.
.. literalinclude:: serializer.py
We save the serializer as ``serializer.py``.
We also need to update ``strike.py`` as follows:
We also implement the deserializer:
.. literalinclude:: strike.py
That is all! You are now ready to create the specification from JSON.
Creating a workflow specification (using JSON)
----------------------------------------------
Now we can use the NuclearStrike in the workflow specification in JSON.
Note that this specification is the same as in our first tutorial,
except that it references our class `strike.NuclearStrike`.
.. literalinclude:: nuclear.json
Using the custom serializer and task
------------------------------------
Here we use our brand new serializer in practice:
.. literalinclude:: start.py

View File

@ -0,0 +1,93 @@
{
"task_specs": {
"Start": {
"class": "SpiffWorkflow.specs.StartTask.StartTask",
"manual": false,
"outputs": [
"general"
]
},
"general": {
"class": "SpiffWorkflow.specs.ExclusiveChoice.ExclusiveChoice",
"name": "general",
"manual": true,
"inputs": [
"Start"
],
"outputs": [
"workflow_aborted",
"president"
],
"choice": null,
"default_task_spec": "workflow_aborted",
"cond_task_specs": [
[
[
"SpiffWorkflow.operators.Equal",
[
[
"Attrib",
"confirmation"
],
[
"value",
"yes"
]
]
],
"president"
]
]
},
"president": {
"class": "SpiffWorkflow.specs.ExclusiveChoice.ExclusiveChoice",
"name": "president",
"manual": true,
"inputs": [
"general"
],
"outputs": [
"workflow_aborted",
"nuclear_strike"
],
"choice": null,
"default_task_spec": "workflow_aborted",
"cond_task_specs": [
[
[
"SpiffWorkflow.operators.Equal",
[
[
"Attrib",
"confirmation"
],
[
"value",
"yes"
]
]
],
"nuclear_strike"
]
]
},
"nuclear_strike": {
"class": "strike.NuclearStrike",
"name": "nuclear_strike",
"inputs": [
"president"
]
},
"workflow_aborted": {
"class": "SpiffWorkflow.specs.Cancel.Cancel",
"name": "workflow_aborted",
"inputs": [
"general",
"president"
]
}
},
"description": "",
"file": null,
"name": ""
}

View File

@ -0,0 +1,11 @@
from SpiffWorkflow.serializer.json import JSONSerializer
from strike import NuclearStrike
class NuclearSerializer(JSONSerializer):
    # JSON serializer extended to handle the tutorial's custom
    # NuclearStrike task spec.

    def serialize_nuclear_strike(self, task_spec):
        # NuclearStrike adds no state of its own beyond the base task spec,
        # so the generic task-spec serialization is sufficient.
        return self.serialize_task_spec(task_spec)

    def deserialize_nuclear_strike(self, wf_spec, s_state):
        # Build the concrete spec first, then let the base deserializer
        # populate the common task-spec fields from s_state.
        spec = NuclearStrike(wf_spec, s_state['name'])
        self.deserialize_task_spec(wf_spec, s_state, spec=spec)
        return spec

View File

@ -0,0 +1,18 @@
from SpiffWorkflow import Workflow
from SpiffWorkflow.specs import WorkflowSpec

from serializer import NuclearSerializer

# Load the workflow specification from JSON using our custom serializer
# (the unused "import json" from the original version has been removed).
with open('nuclear.json') as fp:
    workflow_json = fp.read()
serializer = NuclearSerializer()
spec = WorkflowSpec.deserialize(serializer, workflow_json)

# Create a running workflow instance from the specification.
workflow = Workflow(spec)

# Execute until all tasks are done or require manual intervention.
# For the sake of this tutorial, we ignore the "manual" flag on the
# tasks. In practice, you probably don't want to do that.
workflow.complete_all(halt_on_manual=False)

View File

@ -0,0 +1,12 @@
from SpiffWorkflow.specs import Simple
class NuclearStrike(Simple):
    """Custom task spec that "fires the rocket" when the task completes.

    Demonstrates how to hook a custom TaskSpec into SpiffWorkflow's
    serialization machinery (see serializer.py in this tutorial).
    """

    def _on_complete_hook(self, my_task):
        # Invoked by SpiffWorkflow when the task finishes.
        # NOTE(review): assumes self.my_variable was set on this spec
        # elsewhere — confirm against the tutorial setup code.
        print((self.my_variable, "sent!"))

    def serialize(self, serializer):
        # Delegate to the custom serializer method for this spec type.
        return serializer.serialize_nuclear_strike(self)

    @classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
        # Fixed: the first parameter of a classmethod receives the class,
        # so it is conventionally named "cls", not "self".
        return serializer.deserialize_nuclear_strike(wf_spec, s_state)

12
doc/non-bpmn/index.rst Normal file
View File

@ -0,0 +1,12 @@
Non-BPMN support
================
We have maintained support for legacy non-BPMN workflows, but we recommend using
SpiffWorkflow with BPMN, as this is where current development is focused.
.. toctree::
:maxdepth: 2
tutorial/index
custom-tasks/index
patterns

77
doc/non-bpmn/patterns.rst Normal file
View File

@ -0,0 +1,77 @@
.. _patterns:
Supported Workflow Patterns
===========================
.. HINT::
All examples are located
`here <https://github.com/sartography/SpiffWorkflow/blob/master/tests/SpiffWorkflow/data/spiff/>`_.
Control-Flow Patterns
---------------------
1. Sequence [control-flow/sequence.xml]
2. Parallel Split [control-flow/parallel_split.xml]
3. Synchronization [control-flow/synchronization.xml]
4. Exclusive Choice [control-flow/exclusive_choice.xml]
5. Simple Merge [control-flow/simple_merge.xml]
6. Multi-Choice [control-flow/multi_choice.xml]
7. Structured Synchronizing Merge [control-flow/structured_synchronizing_merge.xml]
8. Multi-Merge [control-flow/multi_merge.xml]
9. Structured Discriminator [control-flow/structured_discriminator.xml]
10. Arbitrary Cycles [control-flow/arbitrary_cycles.xml]
11. Implicit Termination [control-flow/implicit_termination.xml]
12. Multiple Instances without Synchronization [control-flow/multi_instance_without_synch.xml]
13. Multiple Instances with a Priori Design-Time Knowledge [control-flow/multi_instance_with_a_priori_design_time_knowledge.xml]
14. Multiple Instances with a Priori Run-Time Knowledge [control-flow/multi_instance_with_a_priori_run_time_knowledge.xml]
15. Multiple Instances without a Priori Run-Time Knowledge [control-flow/multi_instance_without_a_priori.xml]
16. Deferred Choice [control-flow/deferred_choice.xml]
17. Interleaved Parallel Routing [control-flow/interleaved_parallel_routing.xml]
18. Milestone [control-flow/milestone.xml]
19. Cancel Task [control-flow/cancel_task.xml]
20. Cancel Case [control-flow/cancel_case.xml]
21. *NOT IMPLEMENTED*
22. Recursion [control-flow/recursion.xml]
23. Transient Trigger [control-flow/transient_trigger.xml]
24. Persistent Trigger [control-flow/persistent_trigger.xml]
25. Cancel Region [control-flow/cancel_region.xml]
26. Cancel Multiple Instance Task [control-flow/cancel_multi_instance_task.xml]
27. Complete Multiple Instance Task [control-flow/complete_multiple_instance_activity.xml]
28. Blocking Discriminator [control-flow/blocking_discriminator.xml]
29. Cancelling Discriminator [control-flow/cancelling_discriminator.xml]
30. Structured Partial Join [control-flow/structured_partial_join.xml]
31. Blocking Partial Join [control-flow/blocking_partial_join.xml]
32. Cancelling Partial Join [control-flow/cancelling_partial_join.xml]
33. Generalized AND-Join [control-flow/generalized_and_join.xml]
34. Static Partial Join for Multiple Instances [control-flow/static_partial_join_for_multi_instance.xml]
35. Cancelling Partial Join for Multiple Instances [control-flow/cancelling_partial_join_for_multi_instance.xml]
36. Dynamic Partial Join for Multiple Instances [control-flow/dynamic_partial_join_for_multi_instance.xml]
37. Acyclic Synchronizing Merge [control-flow/acyclic_synchronizing_merge.xml]
38. General Synchronizing Merge [control-flow/general_synchronizing_merge.xml]
39. Critical Section [control-flow/critical_section.xml]
40. Interleaved Routing [control-flow/interleaved_routing.xml]
41. Thread Merge [control-flow/thread_merge.xml]
42. Thread Split [control-flow/thread_split.xml]
43. Explicit Termination [control-flow/explicit_termination.xml]
Workflow Data Patterns
----------------------
1. Task Data [data/task_data.xml]
2. Block Data [data/block_data.xml]
3. *NOT IMPLEMENTED*
4. *NOT IMPLEMENTED*
5. *NOT IMPLEMENTED*
6. *NOT IMPLEMENTED*
7. *NOT IMPLEMENTED*
8. *NOT IMPLEMENTED*
9. Task to Task [data/task_to_task.xml]
10. Block Task to Sub-Workflow Decomposition [data/block_to_subworkflow.xml]
11. Sub-Workflow Decomposition to Block Task [data/subworkflow_to_block.xml]
Specs that have no corresponding workflow pattern on workflowpatterns.com
-------------------------------------------------------------------------
- Execute - spawns a subprocess and waits for the results
- Transform - executes commands that can be used for data transforms
- Celery - executes a Celery task (see http://celeryproject.org/)

View File

@ -0,0 +1,7 @@
from SpiffWorkflow import Workflow
from SpiffWorkflow.serializer.json import JSONSerializer

serializer = JSONSerializer()

# Read the previously serialized workflow state from disk.
with open('workflow.json') as fp:
    workflow_json = fp.read()

# Rebuild the running Workflow instance (this pulls in its spec as well).
workflow = Workflow.deserialize(serializer, workflow_json)

View File

@ -0,0 +1,7 @@
from SpiffWorkflow.specs import WorkflowSpec
from SpiffWorkflow.serializer.json import JSONSerializer

serializer = JSONSerializer()

# Read the previously serialized workflow specification from disk.
with open('workflow-spec.json') as fp:
    workflow_json = fp.read()

# Rebuild the WorkflowSpec object from its JSON representation.
spec = WorkflowSpec.deserialize(serializer, workflow_json)

View File

@ -0,0 +1,104 @@
Tutorial - Non-BPMN
===================
Introduction
------------
In this chapter we are going to use Spiff Workflow to solve a real-world
problem: We will create a workflow for triggering a nuclear strike.
We are assuming that you are familiar with the :doc:`../basics`.
Assume you want to send the rockets, but only after both the president and
a general have signed off on it.
There are two different ways of defining a workflow: Either by deserializing
(from XML or JSON), or using Python.
Creating the workflow specification (using Python)
--------------------------------------------------
As a first step, we are going to create a simple workflow in code.
In Python, the workflow is defined as follows:
.. literalinclude:: nuclear.py
Hopefully the code is self explaining.
Using Python to write a workflow can quickly become tedious. It is
usually a better idea to use another format.
Creating a workflow specification (using JSON)
----------------------------------------------
Once you have completed the serializer as shown above, you can
write the specification in JSON.
Here is an example that is doing exactly the same as the Python
WorkflowSpec above:
.. literalinclude:: nuclear.json
Creating a workflow out of the specification
--------------------------------------------
Now it is time to get started and actually create and execute
a workflow according to the specification.
Since we included *manual* tasks in the specification, you will want
to implement a user interface in practice, but we are just going to
assume that all tasks are automatic for this tutorial.
Note that the *manual* flag has no effect on the control flow; it is
just a flag that a user interface may use to identify tasks that
require a user input.
.. literalinclude:: start.py
:meth:`SpiffWorkflow.Workflow.complete_all` completes all tasks in
accordance to the specification, until no further tasks are READY
for being executed.
Note that this does not mean that the workflow is completed after
calling :meth:`SpiffWorkflow.Workflow.complete_all`, since some
tasks may be WAITING, or may be blocked by another WAITING task,
for example.
Serializing a workflow
----------------------
If you want to store a :class:`SpiffWorkflow.specs.WorkflowSpec`, you can
use :meth:`SpiffWorkflow.specs.WorkflowSpec.serialize`:
.. literalinclude:: serialize.py
If you want to store a :class:`SpiffWorkflow.Workflow`,
use :meth:`SpiffWorkflow.Workflow.serialize`:
.. literalinclude:: serialize-wf.py
Deserializing a workflow
------------------------
The following example shows how to restore a
:class:`SpiffWorkflow.specs.WorkflowSpec` using
:meth:`SpiffWorkflow.specs.WorkflowSpec.deserialize`.
.. literalinclude:: deserialize.py
To restore a :class:`SpiffWorkflow.Workflow`, use
:meth:`SpiffWorkflow.Workflow.deserialize` instead:
.. literalinclude:: deserialize-wf.py
Where to go from here?
----------------------
This first tutorial actually has a problem: If you want to save the workflow,
SpiffWorkflow won't be able to re-connect the signals because it can not
save the reference to your code.
So after deserializing the workflow, you will need to re-connect the signals
yourself.
If you would rather have it such that SpiffWorkflow handles this for you,
you need to create a custom task and tell SpiffWorkflow how to
serialize and deserialize it. The next tutorial shows how this is done.

View File

@ -0,0 +1,98 @@
{
"task_specs": {
"Start": {
"class": "SpiffWorkflow.specs.StartTask.StartTask",
"id" : 1,
"manual": false,
"outputs": [
2
]
},
"general": {
"class": "SpiffWorkflow.specs.ExclusiveChoice.ExclusiveChoice",
"name": "general",
"id" : 2,
"manual": true,
"inputs": [
1
],
"outputs": [
5,
3
],
"choice": null,
"default_task_spec": "workflow_aborted",
"cond_task_specs": [
[
[
"SpiffWorkflow.operators.Equal",
[
[
"Attrib",
"confirmation"
],
[
"value",
"yes"
]
]
],
"president"
]
]
},
"president": {
"class": "SpiffWorkflow.specs.ExclusiveChoice.ExclusiveChoice",
"name": "president",
"id" : 3,
"manual": true,
"inputs": [
2
],
"outputs": [
5,
4
],
"choice": null,
"default_task_spec": "workflow_aborted",
"cond_task_specs": [
[
[
"SpiffWorkflow.operators.Equal",
[
[
"Attrib",
"confirmation"
],
[
"value",
"yes"
]
]
],
"nuclear_strike"
]
]
},
"nuclear_strike": {
"id" : 4,
"class": "SpiffWorkflow.specs.Simple.Simple",
"name": "nuclear_strike",
"inputs": [
3
]
},
"workflow_aborted": {
"id" : 5,
"class": "SpiffWorkflow.specs.Cancel.Cancel",
"name": "workflow_aborted",
"inputs": [
2,
3
]
}
},
"description": "",
"file": null,
"name": ""
}

View File

@ -0,0 +1,36 @@
from SpiffWorkflow.specs import WorkflowSpec, ExclusiveChoice, Simple, Cancel
from SpiffWorkflow.operators import Equal, Attrib
def my_nuclear_strike(msg):
    """Completion callback for the 'nuclear_strike' task: report the launch."""
    # Same output as before: "Launched: <msg>"
    print("Launched:", msg)
class NuclearStrikeWorkflowSpec(WorkflowSpec):
    """Workflow spec that requires two sign-offs before firing the strike."""

    def __init__(self):
        WorkflowSpec.__init__(self)

        # Step 1: the general confirms (or aborts) the nuclear strike.
        confirm_general = ExclusiveChoice(self, 'general')
        self.start.connect(confirm_general)

        # Aborting is the default branch of the general's choice.
        abort = Cancel(self, 'workflow_aborted')
        confirm_general.connect(abort)

        # Step 2: if the general confirmed, ask the president next.
        confirm_president = ExclusiveChoice(self, 'president')
        confirmed = Equal(Attrib('confirmation'), 'yes')
        confirm_general.connect_if(confirmed, confirm_president)

        # Aborting is the president's default branch, too.
        confirm_president.connect(abort)

        # Step 3: on the president's confirmation, perform the strike.
        strike = Simple(self, 'nuclear_strike')
        confirm_president.connect_if(confirmed, strike)

        # Hook our Python callback up to the task named 'nuclear_strike'.
        strike.completed_event.connect(my_nuclear_strike)

        # As soon as all tasks are either "completed" or "aborted", the
        # workflow implicitly ends.

View File

@ -0,0 +1,14 @@
import json
from SpiffWorkflow import Workflow
from SpiffWorkflow.serializer.json import JSONSerializer
from nuclear import NuclearStrikeWorkflowSpec
# Serialize a running workflow (spec plus task state) to JSON on disk.
serializer = JSONSerializer()
spec = NuclearStrikeWorkflowSpec()
workflow = Workflow(spec)
data = workflow.serialize(serializer)

# This next line is unnecessary in practice; it just makes the JSON pretty.
pretty = json.dumps(json.loads(data), indent=4, separators=(',', ': '))

# Use a context manager so the file handle is closed even if write() fails
# (the original bare open(...).write(...) leaked the handle).
with open('workflow.json', 'w') as fh:
    fh.write(pretty)

View File

@ -0,0 +1,12 @@
import json
from SpiffWorkflow.serializer.json import JSONSerializer
from nuclear import NuclearStrikeWorkflowSpec
# Serialize only the workflow *specification* (no runtime state) to JSON.
serializer = JSONSerializer()
spec = NuclearStrikeWorkflowSpec()
data = spec.serialize(serializer)

# This next line is unnecessary in practice; it just makes the JSON pretty.
pretty = json.dumps(json.loads(data), indent=4, separators=(',', ': '))

# Use a context manager so the file handle is closed even if write() fails
# (the original bare open(...).write(...) leaked the handle).
with open('workflow-spec.json', 'w') as fh:
    fh.write(pretty)

View File

@ -0,0 +1,25 @@
import json
from SpiffWorkflow.workflow import Workflow
from SpiffWorkflow.specs import WorkflowSpec
from SpiffWorkflow.serializer.json import JSONSerializer
# Restore the workflow specification from the JSON file written earlier.
serializer = JSONSerializer()
with open('nuclear.json') as fp:
    spec = WorkflowSpec.deserialize(serializer, fp.read())

# Alternatively, create an instance of the Python based specification.
#from nuclear import NuclearStrikeWorkflowSpec
#spec = NuclearStrikeWorkflowSpec()

# Instantiate a workflow from the restored specification.
workflow = Workflow(spec)

# Execute until all tasks are done or require manual intervention.
# For the sake of this tutorial, we ignore the "manual" flag on the
# tasks. In practice, you probably don't want to do that.
workflow.complete_all(halt_on_manual=False)

# Alternatively, this is what a UI would do for a manual task.
#workflow.complete_task_from_id(...)

6255
graphics/business_end.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 360 KiB

BIN
graphics/color_pallet.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.4 KiB

BIN
graphics/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

BIN
graphics/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1012 B

30
graphics/favicon.svg Normal file
View File

@ -0,0 +1,30 @@
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svgjs="http://svgjs.com/svgjs" width="37.221199mm" height="35.099442mm"><svg width="37.221199mm" height="35.099442mm" viewBox="0 0 37.221199 35.099442" version="1.1" id="SvgjsSvg1019" inkscape:version="1.1.2 (1:1.1+202202050950+0a00cf5339)" sodipodi:docname="logo_symbol_only.svg" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview id="SvgjsSodipodi:namedview1018" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" inkscape:pageshadow="2" inkscape:pageopacity="0.0" inkscape:pagecheckerboard="0" inkscape:document-units="mm" showgrid="false" showguides="true" inkscape:guide-bbox="true" inkscape:zoom="1.0973349" inkscape:cx="43.742343" inkscape:cy="56.956175" inkscape:window-width="1916" inkscape:window-height="1076" inkscape:window-x="0" inkscape:window-y="0" inkscape:window-maximized="1" inkscape:current-layer="layer1" fit-margin-top="0" fit-margin-left="0" fit-margin-right="0" fit-margin-bottom="0">
<inkscape:grid type="xygrid" id="SvgjsInkscape:grid1017" originx="461.53406" originy="-851.65943"></inkscape:grid>
</sodipodi:namedview>
<defs id="SvgjsDefs1016">
<marker style="overflow:visible" id="SvgjsMarker1015" refX="0" refY="0" orient="auto" inkscape:stockid="Arrow2Send" inkscape:isstock="true">
<path transform="matrix(-0.3,0,0,-0.3,0.69,0)" d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z" style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:0.625;stroke-linejoin:round" id="SvgjsPath1014"></path>
</marker>
<marker style="overflow:visible" id="SvgjsMarker1013" refX="0" refY="0" orient="auto" inkscape:stockid="Arrow1Send" inkscape:isstock="true">
<path transform="matrix(-0.2,0,0,-0.2,-1.2,0)" style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt" d="M 0,0 5,-5 -12.5,0 5,5 Z" id="SvgjsPath1012"></path>
</marker>
<marker style="overflow:visible" id="SvgjsMarker1011" refX="0" refY="0" orient="auto" inkscape:stockid="Arrow1Send" inkscape:isstock="true">
<path transform="matrix(-0.2,0,0,-0.2,-1.2,0)" style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt" d="M 0,0 5,-5 -12.5,0 5,5 Z" id="SvgjsPath1010"></path>
</marker>
<marker style="overflow:visible" id="SvgjsMarker1009" refX="0" refY="0" orient="auto" inkscape:stockid="Arrow1Lend" inkscape:isstock="true">
<path transform="matrix(-0.8,0,0,-0.8,-10,0)" style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt" d="M 0,0 5,-5 -12.5,0 5,5 Z" id="SvgjsPath1008"></path>
</marker>
<marker style="overflow:visible" id="SvgjsMarker1007" refX="0" refY="0" orient="auto" inkscape:stockid="Arrow1Lstart" inkscape:isstock="true">
<path transform="matrix(0.8,0,0,0.8,10,0)" style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt" d="M 0,0 5,-5 -12.5,0 5,5 Z" id="SvgjsPath1006"></path>
</marker>
</defs>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="SvgjsG1005" transform="translate(461.53405,-851.65939)">
<path style="fill:#000000;stroke-width:0.264583" id="SvgjsPath1004" d=""></path>
<path style="fill:#000000;stroke-width:0.264583" id="SvgjsPath1003" d=""></path>
<text xml:space="preserve" style="font-size:4.23333px;line-height:125%;font-family:NanumMyeongjo;-inkscape-font-specification:NanumMyeongjo;letter-spacing:0px;word-spacing:0px;fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" x="-250.71158" y="668.76953" id="SvgjsText1002" svgjs:data="{&quot;leading&quot;:&quot;1.3&quot;}"><tspan sodipodi:role="line" id="SvgjsTspan1001" style="stroke-width:0.264583px" x="-250.71158" y="668.76953" svgjs:data="{&quot;leading&quot;:&quot;1.3&quot;}"></tspan></text>
<path id="SvgjsPath1000" style="vector-effect:none;fill:#126d82;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:4.433;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" d="m -436.04162,851.65939 v 0.91595 c 0.96107,0.84548 1.66181,1.94689 2.02041,3.17567 h -17.54738 c -3.11685,0 -5.68164,2.56482 -5.68164,5.68159 v 3.91192 c 0,3.11682 2.56479,5.68164 5.68164,5.68164 h 5.6851 4.69113 5.6878 c 1.21493,0 2.13344,0.91846 2.13344,2.13344 v 3.91632 c 0,1.21498 -0.91851,2.13346 -2.13344,2.13346 h -14.79076 c -0.75878,-2.29982 -2.93713,-3.97943 -5.47735,-3.97943 -3.16218,0 -5.76138,2.60267 -5.76138,5.76489 0,3.1622 2.5992,5.76399 5.76138,5.76399 2.54974,0 4.73517,-1.69176 5.48615,-4.00482 h 14.78196 c 3.1168,0 5.67808,-2.5613 5.67808,-5.67809 v -3.91632 c 0,-3.11677 -2.56128,-5.68164 -5.67808,-5.68164 h -5.6878 -4.69113 -5.6851 c -1.21497,0 -2.13609,-0.91837 -2.13609,-2.13344 v -3.91192 c 0,-1.21499 0.92112,-2.13696 2.13609,-2.13696 h 17.60609 c -0.33391,1.31874 -1.05865,2.50576 -2.07912,3.4053 v 0.68721 l 11.72877,-5.86483 z m -19.73105,27.11871 c 1.24555,0 2.21936,0.97116 2.21936,2.21674 0,1.24556 -0.97381,2.21936 -2.21936,2.21936 -1.24559,0 -2.21675,-0.9738 -2.21675,-2.21936 0,-1.24558 0.97116,-2.21674 2.21675,-2.21674 z" sodipodi:nodetypes="cccssssccsssscssscssssccsssscccccsssss"></path>
</g>
</svg><style>@media (prefers-color-scheme: light) { :root { filter: none; } }
@media (prefers-color-scheme: dark) { :root { filter: contrast(0.7407407407407407) brightness(2); } }
</style></svg>

After

Width:  |  Height:  |  Size: 5.6 KiB

BIN
graphics/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 63 KiB

254
graphics/logo.svg Normal file
View File

@ -0,0 +1,254 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="177.62154mm"
height="36.387508mm"
viewBox="0 0 177.62154 36.387508"
version="1.1"
id="svg5"
inkscape:version="1.1.2 (1:1.1+202202050950+0a00cf5339)"
sodipodi:docname="logo.svg"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo2.png"
inkscape:export-xdpi="300"
inkscape:export-ydpi="300"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview7"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:document-units="mm"
showgrid="false"
showguides="true"
inkscape:guide-bbox="true"
inkscape:zoom="0.09699162"
inkscape:cx="-30.930507"
inkscape:cy="-582.52455"
inkscape:window-width="1916"
inkscape:window-height="1076"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="layer1"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0">
<inkscape:grid
type="xygrid"
id="grid72980"
originx="923.06812"
originy="-1703.3188" />
</sodipodi:namedview>
<defs
id="defs2">
<marker
style="overflow:visible"
id="Arrow2Send"
refX="0"
refY="0"
orient="auto"
inkscape:stockid="Arrow2Send"
inkscape:isstock="true">
<path
transform="matrix(-0.3,0,0,-0.3,0.69,0)"
d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:0.625;stroke-linejoin:round"
id="path8760" />
</marker>
<marker
style="overflow:visible"
id="marker9093"
refX="0"
refY="0"
orient="auto"
inkscape:stockid="Arrow1Send"
inkscape:isstock="true">
<path
transform="matrix(-0.2,0,0,-0.2,-1.2,0)"
style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
id="path9091" />
</marker>
<marker
style="overflow:visible"
id="Arrow1Send"
refX="0"
refY="0"
orient="auto"
inkscape:stockid="Arrow1Send"
inkscape:isstock="true">
<path
transform="matrix(-0.2,0,0,-0.2,-1.2,0)"
style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
id="path8742" />
</marker>
<marker
style="overflow:visible"
id="Arrow1Lend"
refX="0"
refY="0"
orient="auto"
inkscape:stockid="Arrow1Lend"
inkscape:isstock="true">
<path
transform="matrix(-0.8,0,0,-0.8,-10,0)"
style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
id="path8730" />
</marker>
<marker
style="overflow:visible"
id="Arrow1Lstart"
refX="0"
refY="0"
orient="auto"
inkscape:stockid="Arrow1Lstart"
inkscape:isstock="true">
<path
transform="matrix(0.8,0,0,0.8,10,0)"
style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
id="path8727" />
</marker>
</defs>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(461.53405,-851.65939)">
<path
style="fill:#000000;stroke-width:0.264583"
id="path45562"
d="" />
<path
style="fill:#000000;stroke-width:0.264583"
id="path45542"
d="" />
<text
xml:space="preserve"
style="font-size:4.23333px;line-height:125%;font-family:NanumMyeongjo;-inkscape-font-specification:NanumMyeongjo;letter-spacing:0px;word-spacing:0px;fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
x="-250.71158"
y="668.76953"
id="text59672"><tspan
sodipodi:role="line"
id="tspan59670"
style="stroke-width:0.264583px"
x="-250.71158"
y="668.76953" /></text>
<path
d="m -419.85028,869.85972 q 2.10753,0 3.40104,1.63919 1.30466,1.63918 1.30466,4.80606 0,2.11867 -0.6133,3.56829 -0.61331,1.43847 -1.69494,2.17443 -1.08165,0.73597 -2.48666,0.73597 -0.90323,0 -1.54998,-0.22302 -0.64675,-0.23417 -1.10395,-0.591 -0.45719,-0.36798 -0.79171,-0.78057 h -0.17842 q 0.0892,0.44604 0.13383,0.91439 0.0446,0.46833 0.0446,0.91437 v 5.02907 h -3.40104 v -17.95302 h 2.76543 l 0.47949,1.6169 h 0.15605 q 0.33452,-0.5018 0.81401,-0.92554 0.47949,-0.42374 1.14855,-0.66905 0.68021,-0.25647 1.57228,-0.25647 z m -1.09279,2.72082 q -0.89207,0 -1.41617,0.36799 -0.52408,0.36798 -0.76942,1.10393 -0.23416,0.73597 -0.25646,1.86222 v 0.36798 q 0,1.2043 0.22301,2.04062 0.23418,0.83631 0.76942,1.27121 0.54639,0.43488 1.49423,0.43488 0.78056,0 1.28236,-0.43488 0.50179,-0.4349 0.74711,-1.27121 0.25646,-0.84748 0.25646,-2.06292 0,-1.82875 -0.56869,-2.75428 -0.56869,-0.92554 -1.76185,-0.92554 z"
id="path105589-9"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -410.11335,870.09388 v 12.46676 h -3.40104 v -12.46676 z m -1.69494,-4.8841 q 0.75826,0 1.30466,0.35683 0.54639,0.34568 0.54639,1.30466 0,0.94782 -0.54639,1.31581 -0.5464,0.35682 -1.30466,0.35682 -0.76942,0 -1.31581,-0.35682 -0.53524,-0.36799 -0.53524,-1.31581 0,-0.95898 0.53524,-1.30466 0.54639,-0.35683 1.31581,-0.35683 z"
id="path105591-5"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
d="m -391.15216,872.64745 h -2.94385 v 9.91319 h -3.40103 v -9.91319 h -1.87336 v -1.63918 l 1.87336,-0.91439 v -0.91437 q 0,-1.59458 0.53524,-2.4755 0.54639,-0.89209 1.52768,-1.24891 0.99243,-0.36799 2.34169,-0.36799 0.99245,0 1.80646,0.16725 0.81401,0.15605 1.32696,0.35684 l -0.86978,2.4978 q -0.39027,-0.12263 -0.84747,-0.22301 -0.45719,-0.10041 -1.04819,-0.10041 -0.71365,0 -1.04818,0.43488 -0.32338,0.42374 -0.32338,1.09279 v 0.78056 h 2.94385 z"
id="path106049-0"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
d="m -400.25375,872.64745 h -2.94384 v 9.91319 h -3.40104 v -9.91319 h -1.87335 v -1.63918 l 1.87335,-0.91439 v -0.91437 q 0,-1.59458 0.53525,-2.4755 0.5464,-0.89209 1.52767,-1.24891 0.99244,-0.36799 2.34171,-0.36799 0.99243,0 1.80644,0.16725 0.81403,0.15605 1.32697,0.35684 l -0.86978,2.4978 q -0.39028,-0.12263 -0.84746,-0.22301 -0.45719,-0.10041 -1.04819,-0.10041 -0.71366,0 -1.0482,0.43488 -0.32337,0.42374 -0.32337,1.09279 v 0.78056 h 2.94384 z"
id="path105593-48"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -367.2924,866.25797 -4.14815,16.30267 h -3.93629 l -2.20788,-8.56393 q -0.0668,-0.24532 -0.17842,-0.74711 -0.11144,-0.50178 -0.23417,-1.09278 -0.12264,-0.60216 -0.22302,-1.12626 -0.0892,-0.53524 -0.12263,-0.84746 -0.0334,0.31222 -0.13383,0.83632 -0.0892,0.5241 -0.21187,1.11509 -0.11144,0.591 -0.22303,1.10394 -0.11144,0.51294 -0.1784,0.78057 l -2.19675,8.54162 h -3.92512 l -4.1593,-16.30267 h 3.40103 l 2.08522,8.89845 q 0.0892,0.40143 0.20073,0.95898 0.12263,0.55753 0.23417,1.17084 0.12264,0.60216 0.21185,1.17085 0.10042,0.55755 0.14503,0.97013 0.0558,-0.42373 0.14502,-0.98128 0.0892,-0.56869 0.18956,-1.14854 0.11144,-0.591 0.22303,-1.0928 0.11144,-0.50179 0.20071,-0.81401 l 2.37516,-9.13262 h 3.26722 l 2.37515,9.13262 q 0.078,0.30107 0.17842,0.81401 0.11144,0.5018 0.22301,1.0928 0.11144,0.591 0.20073,1.15969 0.10041,0.55755 0.14502,0.97013 0.078,-0.55754 0.21187,-1.34926 0.14502,-0.80287 0.30107,-1.59459 0.16724,-0.79171 0.28993,-1.32695 l 2.07408,-8.89845 z"
id="path105595-7"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -355.51468,876.30497 q 0,1.56112 -0.42374,2.76542 -0.41257,1.2043 -1.21544,2.04062 -0.79173,0.82517 -1.91797,1.24891 -1.11508,0.42374 -2.52011,0.42374 -1.3158,0 -2.41974,-0.42374 -1.09279,-0.42374 -1.90682,-1.24891 -0.80287,-0.83632 -1.2489,-2.04062 -0.43489,-1.2043 -0.43489,-2.76542 0,-2.07409 0.73597,-3.51256 0.73595,-1.43846 2.09636,-2.18557 1.36042,-0.74712 3.24494,-0.74712 1.75069,0 3.09996,0.74712 1.3604,0.74711 2.12982,2.18557 0.78056,1.43847 0.78056,3.51256 z m -8.61967,0 q 0,1.2266 0.26762,2.06292 0.26763,0.83631 0.83632,1.26005 0.56871,0.42374 1.48308,0.42374 0.90323,0 1.46077,-0.42374 0.56871,-0.42374 0.82517,-1.26005 0.26762,-0.83632 0.26762,-2.06292 0,-1.23777 -0.26762,-2.05178 -0.25646,-0.82517 -0.82517,-1.23775 -0.5687,-0.41259 -1.48307,-0.41259 -1.34927,0 -1.96256,0.92553 -0.60216,0.92552 -0.60216,2.77659 z"
id="path105597-17"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -347.07121,869.85972 q 0.25648,0 0.59102,0.0334 0.34566,0.0222 0.55753,0.0668 l -0.25646,3.18918 q -0.16725,-0.0558 -0.47949,-0.078 -0.30108,-0.0334 -0.52411,-0.0334 -0.65789,0 -1.28235,0.16724 -0.61331,0.16725 -1.10394,0.54639 -0.49064,0.36799 -0.78056,0.98129 -0.27879,0.60214 -0.27879,1.48307 v 6.34489 h -3.40103 v -12.46675 h 2.57588 l 0.50178,2.09639 h 0.16724 q 0.36797,-0.63561 0.91437,-1.15971 0.55755,-0.53525 1.26006,-0.84746 0.71366,-0.32338 1.53882,-0.32338 z"
id="path105599-2"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -341.03634,865.20978 v 7.76105 q 0,0.7025 -0.0558,1.40503 -0.0558,0.7025 -0.12264,1.40501 h 0.0446 q 0.34568,-0.49064 0.70252,-0.97013 0.36797,-0.47949 0.78057,-0.92554 l 3.49023,-3.79132 h 3.83593 l -4.95101,5.40822 5.25208,7.05854 h -3.92513 l -3.5906,-5.05137 -1.46078,1.17085 v 3.88052 h -3.40103 v -17.35086 z"
id="path105601-7"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -323.25392,872.64745 h -2.94385 v 9.91319 h -3.40103 v -9.91319 h -1.87336 v -1.63918 l 1.87336,-0.91439 v -0.91437 q 0,-1.59458 0.53524,-2.4755 0.54639,-0.89209 1.52768,-1.24891 0.99243,-0.36799 2.34169,-0.36799 0.99245,0 1.80646,0.16725 0.81401,0.15605 1.32696,0.35684 l -0.86978,2.4978 q -0.39027,-0.12263 -0.84747,-0.22301 -0.45719,-0.10041 -1.04819,-0.10041 -0.71365,0 -1.04818,0.43488 -0.32338,0.42374 -0.32338,1.09279 v 0.78056 h 2.94385 z"
id="path105603-22"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -317.17448,882.56064 h -3.40103 v -17.35086 h 3.40103 z"
id="path105605-6"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -303.60161,876.30497 q 0,1.56112 -0.42372,2.76542 -0.41259,1.2043 -1.21546,2.04062 -0.79171,0.82517 -1.91797,1.24891 -1.11508,0.42374 -2.5201,0.42374 -1.31581,0 -2.41975,-0.42374 -1.09279,-0.42374 -1.90682,-1.24891 -0.80287,-0.83632 -1.24889,-2.04062 -0.4349,-1.2043 -0.4349,-2.76542 0,-2.07409 0.73597,-3.51256 0.73595,-1.43846 2.09637,-2.18557 1.36041,-0.74712 3.24493,-0.74712 1.75069,0 3.09996,0.74712 1.3604,0.74711 2.12982,2.18557 0.78056,1.43847 0.78056,3.51256 z m -8.61967,0 q 0,1.2266 0.26763,2.06292 0.26762,0.83631 0.83631,1.26005 0.56871,0.42374 1.48309,0.42374 0.90323,0 1.46076,-0.42374 0.56871,-0.42374 0.82517,-1.26005 0.26762,-0.83632 0.26762,-2.06292 0,-1.23777 -0.26762,-2.05178 -0.25646,-0.82517 -0.82517,-1.23775 -0.5687,-0.41259 -1.48307,-0.41259 -1.34925,0 -1.96256,0.92553 -0.60216,0.92552 -0.60216,2.77659 z"
id="path105607-1"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
d="m -291.205,882.56064 -0.95898,-4.36001 q -0.078,-0.39029 -0.25648,-1.14855 -0.17842,-0.76942 -0.39027,-1.6392 -0.20073,-0.88092 -0.37915,-1.62804 -0.16724,-0.7471 -0.24532,-1.09278 h -0.10041 q -0.078,0.34568 -0.24532,1.09278 -0.16724,0.74712 -0.37913,1.62804 -0.20071,0.88093 -0.37913,1.66149 -0.17842,0.76942 -0.26762,1.17085 l -1.00359,4.31542 h -3.6575 l -3.546,-12.46676 h 3.38989 l 1.43847,5.51973 q 0.14503,0.57985 0.27877,1.38272 0.13383,0.7917 0.23417,1.53882 0.11144,0.73597 0.16725,1.17085 h 0.0892 q 0.0222,-0.32338 0.0892,-0.85862 0.078,-0.53525 0.16725,-1.10394 0.10041,-0.57985 0.17842,-1.03704 0.0892,-0.46835 0.13383,-0.63561 l 1.53882,-5.97691 h 3.74671 l 1.46078,5.97691 q 0.078,0.32338 0.20071,1.02588 0.13382,0.70252 0.23417,1.44964 0.10041,0.73595 0.11144,1.15969 h 0.0892 q 0.0446,-0.37913 0.15604,-1.12624 0.11144,-0.74712 0.25648,-1.56115 0.15605,-0.82515 0.31223,-1.405 l 1.49423,-5.51973 h 3.33412 l -3.5906,12.46676 z"
id="path105609-0"
style="font-weight:bold;font-size:13.4639px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';letter-spacing:0px;word-spacing:0px;fill:#126d82;fill-opacity:1;stroke-width:1.42731px"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<path
id="path73257-7-2-9-159"
style="vector-effect:none;fill:#126d82;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:4.433;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m -436.04162,851.65939 v 0.91595 c 0.96107,0.84548 1.66181,1.94689 2.02041,3.17567 h -17.54738 c -3.11685,0 -5.68164,2.56482 -5.68164,5.68159 v 3.91192 c 0,3.11682 2.56479,5.68164 5.68164,5.68164 h 5.6851 4.69113 5.6878 c 1.21493,0 2.13344,0.91846 2.13344,2.13344 v 3.91632 c 0,1.21498 -0.91851,2.13346 -2.13344,2.13346 h -14.79076 c -0.75878,-2.29982 -2.93713,-3.97943 -5.47735,-3.97943 -3.16218,0 -5.76138,2.60267 -5.76138,5.76489 0,3.1622 2.5992,5.76399 5.76138,5.76399 2.54974,0 4.73517,-1.69176 5.48615,-4.00482 h 14.78196 c 3.1168,0 5.67808,-2.5613 5.67808,-5.67809 v -3.91632 c 0,-3.11677 -2.56128,-5.68164 -5.67808,-5.68164 h -5.6878 -4.69113 -5.6851 c -1.21497,0 -2.13609,-0.91837 -2.13609,-2.13344 v -3.91192 c 0,-1.21499 0.92112,-2.13696 2.13609,-2.13696 h 17.60609 c -0.33391,1.31874 -1.05865,2.50576 -2.07912,3.4053 v 0.68721 l 11.72877,-5.86483 z m -19.73105,27.11871 c 1.24555,0 2.21936,0.97116 2.21936,2.21674 0,1.24556 -0.97381,2.21936 -2.21936,2.21936 -1.24559,0 -2.21675,-0.9738 -2.21675,-2.21936 0,-1.24558 0.97116,-2.21674 2.21675,-2.21674 z"
sodipodi:nodetypes="cccssssccsssscssscssssccsssscccccsssss"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.18046px;line-height:125%;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.448779px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
x="-421.95959"
y="859.76959"
id="text109870-49"
inkscape:export-filename="/home/dan/code/workflow/SpiffWorkflow/graphics/logo_med.png"
inkscape:export-xdpi="50.258995"
inkscape:export-ydpi="50.258995"><tspan
sodipodi:role="line"
id="tspan109868-0"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.448779px"
x="-421.95959"
y="859.76959">Draw the code</tspan></text>
</g>
</svg>

After

Width:  |  Height:  |  Size: 19 KiB

BIN
graphics/logo2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 95 KiB

BIN
graphics/logo_icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

BIN
graphics/logo_med.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Some files were not shown because too many files have changed in this diff Show More