Squashed 'SpiffWorkflow/' content from commit 63db3e4
git-subtree-dir: SpiffWorkflow git-subtree-split: 63db3e45947ec66b8d0efc2c74064004f8ff482c
This commit is contained in:
commit
7c219fd731
|
@ -0,0 +1,9 @@
|
|||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
max_line_length = 79
|
|
@ -0,0 +1,33 @@
|
|||
name: Publish Python 🐍 distributions 📦 to PyPI and TestPyPI
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
jobs:
|
||||
build-n-publish:
|
||||
name: Build and publish Python 🐍 distributions 📦 to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
- name: Set up Python 3.9
|
||||
uses: actions/setup-python@v1
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install pypa/build
|
||||
run: >-
|
||||
python -m
|
||||
pip install
|
||||
build
|
||||
--user
|
||||
- name: Build a binary wheel and a source tarball
|
||||
run: >-
|
||||
python -m
|
||||
build
|
||||
--sdist
|
||||
--wheel
|
||||
--outdir dist/
|
||||
- name: Publish distribution 📦 to PyPI
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
uses: pypa/gh-action-pypi-publish@master
|
||||
with:
|
||||
username: __token__
|
||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
|
@ -0,0 +1,279 @@
|
|||
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/python,intellij+all,macos
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=python,intellij+all,macos
|
||||
|
||||
### Intellij+all ###
|
||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||
|
||||
# User-specific stuff
|
||||
.idea/**/workspace.xml
|
||||
.idea/**/tasks.xml
|
||||
.idea/**/usage.statistics.xml
|
||||
.idea/**/dictionaries
|
||||
.idea/**/shelf
|
||||
|
||||
# AWS User-specific
|
||||
.idea/**/aws.xml
|
||||
|
||||
# Generated files
|
||||
.idea/**/contentModel.xml
|
||||
|
||||
# Sensitive or high-churn files
|
||||
.idea/**/dataSources/
|
||||
.idea/**/dataSources.ids
|
||||
.idea/**/dataSources.local.xml
|
||||
.idea/**/sqlDataSources.xml
|
||||
.idea/**/dynamic.xml
|
||||
.idea/**/uiDesigner.xml
|
||||
.idea/**/dbnavigator.xml
|
||||
|
||||
# Gradle
|
||||
.idea/**/gradle.xml
|
||||
.idea/**/libraries
|
||||
|
||||
# Gradle and Maven with auto-import
|
||||
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||
# since they will be recreated, and may cause churn. Uncomment if using
|
||||
# auto-import.
|
||||
# .idea/artifacts
|
||||
# .idea/compiler.xml
|
||||
# .idea/jarRepositories.xml
|
||||
# .idea/modules.xml
|
||||
# .idea/*.iml
|
||||
# .idea/modules
|
||||
# *.iml
|
||||
# *.ipr
|
||||
|
||||
# CMake
|
||||
cmake-build-*/
|
||||
|
||||
# Mongo Explorer plugin
|
||||
.idea/**/mongoSettings.xml
|
||||
|
||||
# File-based project format
|
||||
*.iws
|
||||
|
||||
# IntelliJ
|
||||
out/
|
||||
|
||||
# mpeltonen/sbt-idea plugin
|
||||
.idea_modules/
|
||||
|
||||
# JIRA plugin
|
||||
atlassian-ide-plugin.xml
|
||||
|
||||
# Cursive Clojure plugin
|
||||
.idea/replstate.xml
|
||||
|
||||
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||
com_crashlytics_export_strings.xml
|
||||
crashlytics.properties
|
||||
crashlytics-build.properties
|
||||
fabric.properties
|
||||
|
||||
# Editor-based Rest Client
|
||||
.idea/httpRequests
|
||||
|
||||
# Android studio 3.1+ serialized cache file
|
||||
.idea/caches/build_file_checksums.ser
|
||||
|
||||
### Intellij+all Patch ###
|
||||
# Ignores the whole .idea folder and all .iml files
|
||||
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
|
||||
|
||||
.idea/
|
||||
|
||||
# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
|
||||
|
||||
*.iml
|
||||
modules.xml
|
||||
.idea/misc.xml
|
||||
*.ipr
|
||||
|
||||
# Sonarlint plugin
|
||||
.idea/sonarlint
|
||||
|
||||
### macOS ###
|
||||
# General
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two \r
|
||||
Icon
|
||||
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Files that might appear in the root of a volume
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Directories potentially created on remote AFP share
|
||||
.AppleDB
|
||||
.AppleDesktop
|
||||
Network Trash Folder
|
||||
Temporary Items
|
||||
.apdisk
|
||||
|
||||
### Python ###
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/python,intellij+all,macos
|
||||
|
||||
*.py[co]
|
||||
*.swp
|
||||
dist
|
||||
/build
|
||||
*.egg-info
|
||||
unit_test.cfg
|
||||
nosetests.xml
|
||||
.coverage
|
||||
coverage.xml
|
||||
.c9revisions
|
||||
.idea
|
||||
/venv
|
|
@ -0,0 +1,16 @@
|
|||
# .readthedocs.yaml
|
||||
# Read the Docs configuration file
|
||||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
|
||||
|
||||
build:
|
||||
image: latest
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF
|
||||
formats: []
|
||||
|
||||
python:
|
||||
pip_install: true
|
||||
version: 3.7
|
||||
extra_requirements:
|
||||
- docs
|
|
@ -0,0 +1,7 @@
|
|||
sonar.organization=sartography
|
||||
sonar.projectKey=sartography_SpiffWorkflow
|
||||
sonar.host.url=https://sonarcloud.io
|
||||
sonar.exclusions=*.bpmn,*.dmn,doc/**
|
||||
sonar.sources=SpiffWorkflow
|
||||
sonar.test.inclusions=tests
|
||||
sonar.python.coverage.reportPaths=tests/SpiffWorkflow/coverage.xml
|
|
@ -0,0 +1 @@
|
|||
python 3.10.4
|
|
@ -0,0 +1,36 @@
|
|||
dist: focal
|
||||
language: python
|
||||
|
||||
python:
|
||||
- 3.7
|
||||
- 3.8
|
||||
- 3.9
|
||||
- 3.10
|
||||
|
||||
addons:
|
||||
sonarcloud:
|
||||
organization: sartography
|
||||
|
||||
install:
|
||||
- pip install -r requirements.txt
|
||||
- pip install celery
|
||||
|
||||
script:
|
||||
- cd tests/SpiffWorkflow
|
||||
- coverage run --source=SpiffWorkflow -m unittest discover -v . "*Test.py"
|
||||
- coverage xml -i
|
||||
- cd ../..
|
||||
|
||||
after_success:
|
||||
- sonar-scanner
|
||||
|
||||
git:
|
||||
depth: false
|
||||
|
||||
jobs:
|
||||
include:
|
||||
- python: 3.7
|
||||
- python: 3.8
|
||||
- python: 3.9
|
||||
- python: 3.10
|
||||
env: RUN_QUALITY_GATES=true
|
|
@ -0,0 +1,5 @@
|
|||
Samuel Abels <http://github.com/knipknap/>
|
||||
Ziad Sawalha <http://github.com/ziadsawalha/>
|
||||
Matthew Hampton <http://github.com/matthewhampton/>
|
||||
Kelly McDonald
|
||||
Dan Funk
|
|
@ -0,0 +1,60 @@
|
|||
Guide for Contributors
|
||||
#######################
|
||||
|
||||
Coding style:
|
||||
|
||||
Please follow PEP8: http://www.python.org/dev/peps/pep-0008/
|
||||
|
||||
Testing:
|
||||
|
||||
Non-public classes and methods MUST be prefixed by _. This is also important
|
||||
because the test and API documentation machinery makes assumptions based on
|
||||
this convention.
|
||||
|
||||
Every added public class MUST have a corresponding unit test. The tests are
|
||||
placed in the following directory: tests/SpiffWorkflow/
|
||||
The test directory layout mirrors the source code directory layout, e.g.
|
||||
SpiffWorkflow/specs/Join.py
|
||||
has a corresponding test in
|
||||
tests/SpiffWorkflow/specs/JoinTest.py
|
||||
|
||||
The unit test for each class MUST have a CORRELATE class attribute that points
|
||||
to the tested class. (The test machinery uses this attribute to find untested
|
||||
methods.)
|
||||
|
||||
Each commit MUST NOT break functionality. In other words, the code in the
|
||||
repository should function at any time, and all test MUST pass.
|
||||
|
||||
Documentation:
|
||||
|
||||
Every public class and function or method MUST include API documentation. The
|
||||
documentation MUST cover the method's arguments and return values.
|
||||
|
||||
Write inline documentation generously.
|
||||
|
||||
Repository:
|
||||
|
||||
Make sure that each commit contains related changes only. E.g. don't fix
|
||||
two unrelated bugs in one commit, or introduce a new feature while refactoring
|
||||
another part of the program in the same commit. When in doubt, use multiple
|
||||
small commits. In general, most commits should be relatively small unless they
|
||||
are plain additions.
|
||||
|
||||
Licensing:
|
||||
|
||||
You have to agree to licensing under the lGPLv3, and every added file MUST
|
||||
include a copyright header.
|
||||
|
||||
If you modify a file and add a chunk of at least 7 lines in size, please add
|
||||
yourself to the copyright header of that file.
|
||||
|
||||
## Releases
|
||||
For you dev op folks who release builds to the larger community ...
|
||||
|
||||
Be sure to edit the conf.py, and update the release tag: doc/conf.py
|
||||
And also edit setup.py and assure that has the same release tag.
|
||||
New versions of SpiffWorkflow are automatically published to PyPi whenever
|
||||
a maintainer of our GitHub repository creates a new release on GitHub. This
|
||||
is managed through GitHub's actions. The configuration of which can be
|
||||
found in .github/workflows/....
|
||||
Just create a release in GitHub that mathches the release number in doc/conf.py
|
|
@ -0,0 +1,165 @@
|
|||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
|
||||
This version of the GNU Lesser General Public License incorporates
|
||||
the terms and conditions of version 3 of the GNU General Public
|
||||
License, supplemented by the additional permissions listed below.
|
||||
|
||||
0. Additional Definitions.
|
||||
|
||||
As used herein, "this License" refers to version 3 of the GNU Lesser
|
||||
General Public License, and the "GNU GPL" refers to version 3 of the GNU
|
||||
General Public License.
|
||||
|
||||
"The Library" refers to a covered work governed by this License,
|
||||
other than an Application or a Combined Work as defined below.
|
||||
|
||||
An "Application" is any work that makes use of an interface provided
|
||||
by the Library, but which is not otherwise based on the Library.
|
||||
Defining a subclass of a class defined by the Library is deemed a mode
|
||||
of using an interface provided by the Library.
|
||||
|
||||
A "Combined Work" is a work produced by combining or linking an
|
||||
Application with the Library. The particular version of the Library
|
||||
with which the Combined Work was made is also called the "Linked
|
||||
Version".
|
||||
|
||||
The "Minimal Corresponding Source" for a Combined Work means the
|
||||
Corresponding Source for the Combined Work, excluding any source code
|
||||
for portions of the Combined Work that, considered in isolation, are
|
||||
based on the Application, and not on the Linked Version.
|
||||
|
||||
The "Corresponding Application Code" for a Combined Work means the
|
||||
object code and/or source code for the Application, including any data
|
||||
and utility programs needed for reproducing the Combined Work from the
|
||||
Application, but excluding the System Libraries of the Combined Work.
|
||||
|
||||
1. Exception to Section 3 of the GNU GPL.
|
||||
|
||||
You may convey a covered work under sections 3 and 4 of this License
|
||||
without being bound by section 3 of the GNU GPL.
|
||||
|
||||
2. Conveying Modified Versions.
|
||||
|
||||
If you modify a copy of the Library, and, in your modifications, a
|
||||
facility refers to a function or data to be supplied by an Application
|
||||
that uses the facility (other than as an argument passed when the
|
||||
facility is invoked), then you may convey a copy of the modified
|
||||
version:
|
||||
|
||||
a) under this License, provided that you make a good faith effort to
|
||||
ensure that, in the event an Application does not supply the
|
||||
function or data, the facility still operates, and performs
|
||||
whatever part of its purpose remains meaningful, or
|
||||
|
||||
b) under the GNU GPL, with none of the additional permissions of
|
||||
this License applicable to that copy.
|
||||
|
||||
3. Object Code Incorporating Material from Library Header Files.
|
||||
|
||||
The object code form of an Application may incorporate material from
|
||||
a header file that is part of the Library. You may convey such object
|
||||
code under terms of your choice, provided that, if the incorporated
|
||||
material is not limited to numerical parameters, data structure
|
||||
layouts and accessors, or small macros, inline functions and templates
|
||||
(ten or fewer lines in length), you do both of the following:
|
||||
|
||||
a) Give prominent notice with each copy of the object code that the
|
||||
Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the object code with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
4. Combined Works.
|
||||
|
||||
You may convey a Combined Work under terms of your choice that,
|
||||
taken together, effectively do not restrict modification of the
|
||||
portions of the Library contained in the Combined Work and reverse
|
||||
engineering for debugging such modifications, if you also do each of
|
||||
the following:
|
||||
|
||||
a) Give prominent notice with each copy of the Combined Work that
|
||||
the Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the Combined Work with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
c) For a Combined Work that displays copyright notices during
|
||||
execution, include the copyright notice for the Library among
|
||||
these notices, as well as a reference directing the user to the
|
||||
copies of the GNU GPL and this license document.
|
||||
|
||||
d) Do one of the following:
|
||||
|
||||
0) Convey the Minimal Corresponding Source under the terms of this
|
||||
License, and the Corresponding Application Code in a form
|
||||
suitable for, and under terms that permit, the user to
|
||||
recombine or relink the Application with a modified version of
|
||||
the Linked Version to produce a modified Combined Work, in the
|
||||
manner specified by section 6 of the GNU GPL for conveying
|
||||
Corresponding Source.
|
||||
|
||||
1) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (a) uses at run time
|
||||
a copy of the Library already present on the user's computer
|
||||
system, and (b) will operate properly with a modified version
|
||||
of the Library that is interface-compatible with the Linked
|
||||
Version.
|
||||
|
||||
e) Provide Installation Information, but only if you would otherwise
|
||||
be required to provide such information under section 6 of the
|
||||
GNU GPL, and only to the extent that such information is
|
||||
necessary to install and execute a modified version of the
|
||||
Combined Work produced by recombining or relinking the
|
||||
Application with a modified version of the Linked Version. (If
|
||||
you use option 4d0, the Installation Information must accompany
|
||||
the Minimal Corresponding Source and Corresponding Application
|
||||
Code. If you use option 4d1, you must provide the Installation
|
||||
Information in the manner specified by section 6 of the GNU GPL
|
||||
for conveying Corresponding Source.)
|
||||
|
||||
5. Combined Libraries.
|
||||
|
||||
You may place library facilities that are a work based on the
|
||||
Library side by side in a single library together with other library
|
||||
facilities that are not Applications and are not covered by this
|
||||
License, and convey such a combined library under terms of your
|
||||
choice, if you do both of the following:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work based
|
||||
on the Library, uncombined with any other library facilities,
|
||||
conveyed under the terms of this License.
|
||||
|
||||
b) Give prominent notice with the combined library that part of it
|
||||
is a work based on the Library, and explaining where to find the
|
||||
accompanying uncombined form of the same work.
|
||||
|
||||
6. Revised Versions of the GNU Lesser General Public License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU Lesser General Public License from time to time. Such new
|
||||
versions will be similar in spirit to the present version, but may
|
||||
differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Library as you received it specifies that a certain numbered version
|
||||
of the GNU Lesser General Public License "or any later version"
|
||||
applies to it, you have the option of following the terms and
|
||||
conditions either of that published version or of any later version
|
||||
published by the Free Software Foundation. If the Library as you
|
||||
received it does not specify a version number of the GNU Lesser
|
||||
General Public License, you may choose any version of the GNU Lesser
|
||||
General Public License ever published by the Free Software Foundation.
|
||||
|
||||
If the Library as you received it specifies that a proxy can decide
|
||||
whether future versions of the GNU Lesser General Public License shall
|
||||
apply, that proxy's public statement of acceptance of any version is
|
||||
permanent authorization for you to choose that version for the
|
||||
Library.
|
|
@ -0,0 +1,4 @@
|
|||
FROM python:3.6
|
||||
RUN apt-get -y update && apt-get upgrade -yu
|
||||
COPY . /tmp/SpiffWorkflow
|
||||
RUN cd /tmp/SpiffWorkflow && make wheel && pip install dist/SpiffWorkflow*.whl
|
|
@ -0,0 +1,3 @@
|
|||
To install this package, run
|
||||
|
||||
sudo python setup.py install --prefix /usr/local
|
|
@ -0,0 +1,88 @@
|
|||
NAME=SpiffWorkflow
|
||||
VERSION=`python setup.py --version`
|
||||
PREFIX=/usr/local/
|
||||
BIN_DIR=$(PREFIX)/bin
|
||||
SITE_DIR=$(PREFIX)`python -c "import sys; from distutils.sysconfig import get_python_lib; print get_python_lib()[len(sys.prefix):]"`
|
||||
|
||||
###################################################################
|
||||
# Standard targets.
|
||||
###################################################################
|
||||
.PHONY : clean
|
||||
clean:
|
||||
find . -name "*.pyc" -o -name "*.pyo" | xargs -rn1 rm -f
|
||||
find . -name "*.egg-info" | xargs -rn1 rm -r
|
||||
rm -Rf build
|
||||
cd doc; make clean
|
||||
|
||||
.PHONY : dist-clean
|
||||
dist-clean: clean
|
||||
rm -Rf dist
|
||||
|
||||
.PHONY : doc
|
||||
doc:
|
||||
cd doc; make
|
||||
|
||||
install:
|
||||
mkdir -p $(SITE_DIR)
|
||||
./version.sh
|
||||
export PYTHONPATH=$(SITE_DIR):$(PYTHONPATH); \
|
||||
python setup.py install --prefix $(PREFIX) \
|
||||
--install-scripts $(BIN_DIR) \
|
||||
--install-lib $(SITE_DIR)
|
||||
./version.sh --reset
|
||||
|
||||
uninstall:
|
||||
# Sorry, Python's distutils support no such action yet.
|
||||
|
||||
.PHONY : tests
|
||||
tests:
|
||||
cd tests/$(NAME)
|
||||
PYTHONPATH=../.. python -m unittest discover -v . "*Test.py"
|
||||
|
||||
.PHONY : tests-cov
|
||||
tests-cov:
|
||||
cd tests/$(NAME)
|
||||
coverage run --source=$(NAME) -m unittest discover -v . "*Test.py"
|
||||
|
||||
.PHONY : tests-ind
|
||||
tests-ind:
|
||||
cd tests/$(NAME)
|
||||
@PYTHONPATH=../.. find . -name "*Test.py" -printf '%p' -exec python -m unittest {} \;
|
||||
|
||||
.PHONY : tests-timing
|
||||
tests-timing:
|
||||
@make tests-ind 2>&1 | ./scripts/test_times.py
|
||||
|
||||
###################################################################
|
||||
# Package builders.
|
||||
###################################################################
|
||||
targz: clean
|
||||
./version.sh
|
||||
python setup.py sdist --formats gztar
|
||||
./version.sh --reset
|
||||
|
||||
tarbz: clean
|
||||
./version.sh
|
||||
python setup.py sdist --formats bztar
|
||||
./version.sh --reset
|
||||
|
||||
wheel: clean
|
||||
./version.sh
|
||||
python setup.py bdist_wheel --universal
|
||||
./version.sh --reset
|
||||
|
||||
deb: clean
|
||||
./version.sh
|
||||
debuild -S -sa
|
||||
cd ..; sudo pbuilder build $(NAME)_$(VERSION)-0ubuntu1.dsc; cd -
|
||||
./version.sh --reset
|
||||
|
||||
dist: targz tarbz wheel
|
||||
|
||||
###################################################################
|
||||
# Publishers.
|
||||
###################################################################
|
||||
dist-publish:
|
||||
./version.sh
|
||||
python setup.py bdist_wheel --universal upload
|
||||
./version.sh --reset
|
|
@ -0,0 +1,121 @@
|
|||
## SpiffWorkflow
|
||||
![Logo](./graphics/logo_med.png)
|
||||
|
||||
Spiff Workflow is a workflow engine implemented in pure Python. It is based on
|
||||
the excellent work of the Workflow Patterns initiative. In 2020 and 2021,
|
||||
extensive support was added for BPMN / DMN processing.
|
||||
|
||||
## Motivation
|
||||
We created SpiffWorkflow to support the development of low-code business
|
||||
applications in Python. Using BPMN will allow non-developers to describe
|
||||
complex workflow processes in a visual diagram, coupled with a powerful python
|
||||
script engine that works seamlessly within the diagrams. SpiffWorkflow can parse
|
||||
these diagrams and execute them. The ability for businesses to create
|
||||
clear, coherent diagrams that drive an application has far reaching potential.
|
||||
While multiple tools exist for doing this in Java, we believe that wide
|
||||
adoption of the Python Language, and it's ease of use, create a winning
|
||||
strategy for building Low-Code applications.
|
||||
|
||||
|
||||
## Build status
|
||||
[![Build Status](https://travis-ci.com/sartography/SpiffWorkflow.svg?branch=master)](https://travis-ci.org/sartography/SpiffWorkflow)
|
||||
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=alert_status)](https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow)
|
||||
[![Coverage](https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=coverage)](https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow)
|
||||
[![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=sartography_SpiffWorkflow&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=sartography_SpiffWorkflow)
|
||||
[![Documentation Status](https://readthedocs.org/projects/spiffworkflow/badge/?version=latest)](http://spiffworkflow.readthedocs.io/en/latest/?badge=latest)
|
||||
[![Issues](https://img.shields.io/github/issues/sartography/spiffworkflow)](https://github.com/sartography/SpiffWorkflow/issues)
|
||||
[![Pull Requests](https://img.shields.io/github/issues-pr/sartography/spiffworkflow)](https://github.com/sartography/SpiffWorkflow/pulls)
|
||||
|
||||
## Code style
|
||||
|
||||
[![PEP8](https://img.shields.io/badge/code%20style-pep8-orange.svg)](https://www.python.org/dev/peps/pep-0008/)
|
||||
|
||||
|
||||
## Dependencies
|
||||
We've worked to minimize external dependencies. We rely on lxml for parsing
|
||||
XML Documents, and there is some legacy support for Celery, but it is not
|
||||
core to the implementation, it is just a way to interconnect these systems.
|
||||
<b>Built with</b>
|
||||
- [lxml](https://lxml.de/)
|
||||
- [celery](https://docs.celeryproject.org/en/stable/)
|
||||
|
||||
## Features
|
||||
* __BPMN__ - support for parsing BPMN diagrams, including the more complex
|
||||
components, like pools and lanes, multi-instance tasks, sub-workflows, timer
|
||||
events, signals, messages, boudary events and looping.
|
||||
* __DMN__ - We have a baseline implementation of DMN that is well integrated
|
||||
with our Python Execution Engine.
|
||||
* __Forms__ - forms, including text fields, selection lists, and most every other
|
||||
thing you can be extracted from the Camunda xml extension, and returned as
|
||||
json data that can be used to generate forms on the command line, or in web
|
||||
applications (we've used Formly to good success)
|
||||
* __Python Workflows__ - We've retained support for building workflows directly
|
||||
in code, or running workflows based on a internal json data structure.
|
||||
|
||||
_A complete list of the latest features is available with our [release notes](https://github.com/sartography/SpiffWorkflow/releases/tag/1.0) for
|
||||
version 1.0._
|
||||
|
||||
## Code Examples and Documentation
|
||||
Detailed documentation is available on [ReadTheDocs](https://spiffworkflow.readthedocs.io/en/latest/)
|
||||
Also, checkout our [example application](https://github.com/sartography/spiff-example-cli), which we
|
||||
reference extensively from the Documentation.
|
||||
|
||||
## Installation
|
||||
```
|
||||
pip install spiffworkflow
|
||||
```
|
||||
|
||||
## Tests
|
||||
```
|
||||
cd tests/SpiffWorkflow
|
||||
coverage run --source=SpiffWorkflow -m unittest discover -v . "*Test.py"
|
||||
```
|
||||
|
||||
## Support
|
||||
You can find us on Discord at https://discord.gg/zDEBEnrF
|
||||
|
||||
Commercial support for SpiffWorkflow is available from
|
||||
[Sartography](https://sartography.com)
|
||||
|
||||
## Contribute
|
||||
Pull Requests are and always will be welcome!
|
||||
|
||||
Please check your formatting, assure that all tests are passing, and include
|
||||
any additional tests that can demonstrate the new code you created is working
|
||||
as expected. If applicable, please reference the issue number in your pull
|
||||
request.
|
||||
|
||||
## Credits and Thanks
|
||||
|
||||
Samuel Abels (@knipknap) for creating SpiffWorkflow and maintaining it for over
|
||||
a decade.
|
||||
|
||||
Matthew Hampton (@matthewhampton) for his initial contributions around BPMN
|
||||
parsing and execution.
|
||||
|
||||
The University of Virginia for allowing us to take on the mammoth task of
|
||||
building a general-purpose workflow system for BPMN, and allowing us to
|
||||
contribute that back to the open source community. In particular, we would like
|
||||
to thank [Ron Hutchins](https://www.linkedin.com/in/ron-hutchins-b19603123/),
|
||||
for his trust and support. Without him our efforts would not be possible.
|
||||
|
||||
Bruce Silver, the author of BPMN Quick and Easy Using Method and Style, whose
|
||||
work we referenced extensively as we made implementation decisions and
|
||||
educated ourselves on the BPMN and DMN standards.
|
||||
|
||||
The BPMN.js library, without which we would not have the tools to effectively
|
||||
build out our models, embed an editor in our application, and pull this mad
|
||||
mess together.
|
||||
|
||||
Kelly McDonald (@w4kpm) who dove deeper into the core of SpiffWorkflow than
|
||||
anyone else, and was instrumental in helping us get some of these major
|
||||
enhancements working correctly.
|
||||
|
||||
Thanks also to the many contributions from our community. Large and small.
|
||||
From Ziad (@ziadsawalha) in the early days to Elizabeth (@essweine) more
|
||||
recently. It is good to be a part of this long lived and strong
|
||||
community.
|
||||
|
||||
|
||||
## License
|
||||
GNU LESSER GENERAL PUBLIC LICENSE
|
|
@ -0,0 +1,313 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import re
|
||||
import datetime
|
||||
import operator
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from .PythonScriptEngine import PythonScriptEngine
|
||||
|
||||
# Copyright (C) 2020 Kelly McDonald
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
|
||||
def feelConvertTime(datestr,parsestr):
|
||||
return datetime.datetime.strptime(datestr,parsestr)
|
||||
|
||||
class FeelInterval():
|
||||
def __init__(self, begin, end, leftOpen=False, rightOpen=False):
|
||||
# pesky thing with python floats and Decimal comparison
|
||||
if isinstance(begin,float):
|
||||
begin = Decimal("%0.5f"%begin)
|
||||
if isinstance(end, float):
|
||||
end = Decimal("%0.5f" % end)
|
||||
|
||||
self.startInterval = begin
|
||||
self.endInterval = end
|
||||
self.leftOpen = leftOpen
|
||||
self.rightOpen = rightOpen
|
||||
|
||||
def __eq__(self, other):
|
||||
if self.leftOpen:
|
||||
lhs = other > self.startInterval
|
||||
else:
|
||||
lhs = other >= self.startInterval
|
||||
if self.rightOpen:
|
||||
rhs = other < self.endInterval
|
||||
else:
|
||||
rhs = other <= self.endInterval
|
||||
return lhs and rhs
|
||||
|
||||
class FeelContains():
|
||||
def __init__(self, testItem,invert=False ):
|
||||
self.test = testItem
|
||||
self.invert = invert
|
||||
def __eq__(self, other):
|
||||
has = False
|
||||
if isinstance(other,dict):
|
||||
has = self.test in list(other.keys())
|
||||
else:
|
||||
has = self.test in list(other)
|
||||
if self.invert:
|
||||
return not has
|
||||
else:
|
||||
return has
|
||||
|
||||
class FeelNot():
|
||||
def __init__(self, testItem):
|
||||
self.test = testItem
|
||||
|
||||
def __eq__(self, other):
|
||||
if other == self.test:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def feelConcatenate(*lst):
|
||||
ilist = []
|
||||
for l in lst:
|
||||
ilist = ilist + l
|
||||
return ilist
|
||||
|
||||
def feelAppend(lst,item):
|
||||
newlist = lst[:] # get a copy
|
||||
newlist.append(item)
|
||||
return newlist
|
||||
|
||||
def feelNow():
|
||||
return datetime.datetime.now()
|
||||
|
||||
def feelGregorianDOW(date):
|
||||
# we assume date is either date in Y-m-d format
|
||||
# or it is of datetime class
|
||||
if isinstance(date,str):
|
||||
date = datetime.datetime.strptime(date,'%Y-%m-%d')
|
||||
return date.isoweekday()%7
|
||||
|
||||
|
||||
def transformDuration(duration,td):
|
||||
if duration:
|
||||
return td * float(duration)
|
||||
else:
|
||||
return timedelta(seconds=0)
|
||||
|
||||
def lookupPart(code,base):
|
||||
x= re.search("([0-9.]+)"+code,base)
|
||||
if x:
|
||||
return x.group(1)
|
||||
else:
|
||||
return None
|
||||
|
||||
def feelFilter(var,a,b,op,column=None):
|
||||
"""
|
||||
here we are trying to cover some of the basic test cases,
|
||||
dict, list of dicts and list.
|
||||
"""
|
||||
opmap = {'=':operator.eq,
|
||||
'<':operator.lt,
|
||||
'>':operator.gt,
|
||||
'<=':operator.le,
|
||||
'>=':operator.ge,
|
||||
'!=':operator.ne}
|
||||
b = eval(b)
|
||||
# if it is a list and we are referring to 'item' then we
|
||||
# expect the variable to be a simple list
|
||||
if (isinstance(var,list)) and a == 'item':
|
||||
return [x for x in var if opmap[op](x,b)]
|
||||
# if it is a dictionary, and the keys refer to dictionaries,
|
||||
# then we convert it to a list of dictionaries with the elements
|
||||
# all having {'key':key,<rest of dict>}
|
||||
# if it is a dictionary and the key refers to a non-dict, then
|
||||
# we convert to a dict having {'key':key,'value':value}
|
||||
if (isinstance(var,dict)):
|
||||
newvar = []
|
||||
for key in var.keys():
|
||||
if isinstance(var[key],dict):
|
||||
newterm = var[key]
|
||||
newterm.update({'key':key})
|
||||
newvar.append(newterm)
|
||||
else:
|
||||
newvar.append({'key':key,'value':var[key]})
|
||||
var = newvar
|
||||
|
||||
if column!=None:
|
||||
return [x.get(column) for x in var if opmap[op](x.get(a), b)]
|
||||
else:
|
||||
return [x for x in var if opmap[op](x.get(a), b)]
|
||||
|
||||
|
||||
|
||||
def feelParseISODuration(input):
|
||||
"""
|
||||
Given an ISO duration designation
|
||||
such as :
|
||||
P0Y1M2DT3H2S
|
||||
and convert it into a python timedelta
|
||||
|
||||
Abbreviations may be made as in :
|
||||
|
||||
PT30S
|
||||
|
||||
NB:
|
||||
Months are defined as 30 days currently - as I am dreading getting into
|
||||
Date arithmetic edge cases.
|
||||
|
||||
"""
|
||||
if input[0] != 'P':
|
||||
raise Exception("Oh Crap!")
|
||||
input = input[1:]
|
||||
days, time = input.split("T")
|
||||
lookups = [("Y",days,timedelta(days=365)),
|
||||
("M", days, timedelta(days=30)),
|
||||
("W", days, timedelta(days=7)),
|
||||
("D", days, timedelta(days=1)),
|
||||
("H", time, timedelta(seconds=60*60)),
|
||||
("M", time, timedelta(seconds=60)),
|
||||
("S", time, timedelta(seconds=1)),
|
||||
]
|
||||
totaltime = [transformDuration(lookupPart(x[0],x[1]),x[2]) for x in lookups]
|
||||
return sum(totaltime,timedelta(seconds=0))
|
||||
|
||||
|
||||
|
||||
# Order Matters!!
|
||||
fixes = [(r'string\s+length\((.+?)\)','len(\\1)'),
|
||||
(r'count\((.+?)\)','len(\1)'),
|
||||
(r'concatenate\((.+?)\)','feelConcatenate(\\1)'),
|
||||
(r'append\((.+?),(.+?)\)','feelAppend(\\1,\\2)'), # again will not work with literal list
|
||||
(r'list\s+contains\((.+?),(.+?)\)','\\2 in \\1'), # list contains(['a','b','stupid,','c'],'stupid,') will break
|
||||
(r'contains\((.+?),(.+?)\)','\\2 in \\1'), # contains('my stupid, stupid comment','stupid') will break
|
||||
(r'not\s+?contains\((.+?)\)','FeelContains(\\1,invert=True)'), # not contains('something')
|
||||
(r'not\((.+?)\)','FeelNot(\\1)'), # not('x')
|
||||
|
||||
(r'now\(\)','feelNow()'),
|
||||
(r'contains\((.+?)\)', 'FeelContains(\\1)'), # contains('x')
|
||||
# date and time (<datestr>)
|
||||
(r'date\s+?and\s+?time\s*\((.+?)\)', 'feelConvertTime(\\1,"%Y-%m-%dT%H:%M:%S")'),
|
||||
(r'date\s*\((.+?)\)', 'feelConvertTime(\\1,"%Y-%m-%d)'), # date (<datestring>)
|
||||
(r'day\s+of\s+\week\((.+?)\)','feelGregorianDOW(\\1)'),
|
||||
(r'\[([^\[\]]+?)[.]{2}([^\[\]]+?)\]','FeelInterval(\\1,\\2)'), # closed interval on both sides
|
||||
(r'[\]\(]([^\[\]\(\)]+?)[.]{2}([^\[\]\)\(]+?)\]','FeelInterval(\\1,\\2,leftOpen=True)'), # open lhs
|
||||
(r'\[([^\[\]\(\)]+?)[.]{2}([^\[\]\(\)]+?)[\[\)]','FeelInterval(\\1,\\2,rightOpen=True)'), # open rhs
|
||||
# I was having problems with this matching a "P" somewhere in another expression
|
||||
# so I added a bunch of different cases that should isolate this.
|
||||
(r'^(P(([0-9.]+Y)?([0-9.]+M)?([0-9.]+W)?([0-9.]+D)?)?(T([0-9.]+H)?([0-9.]+M)?([0-9.]+S)?)?)$',
|
||||
'feelParseISODuration("\\1")'), ## Parse ISO Duration convert to timedelta - standalone
|
||||
(r'^(P(([0-9.]+Y)?([0-9.]+M)?([0-9.]+W)?([0-9.]+D)?)?(T([0-9.]+H)?([0-9.]+M)?([0-9.]+S)?)?)\s',
|
||||
'feelParseISODuration("\\1") '), ## Parse ISO Duration convert to timedelta beginning
|
||||
(r'\s(P(([0-9.]+Y)?([0-9.]+M)?([0-9.]+W)?([0-9.]+D)?)?(T([0-9.]+H)?([0-9.]+M)?([0-9.]+S)?)?)\s',
|
||||
' feelParseISODuration("\\1") '), ## Parse ISO Duration convert to timedelta in context
|
||||
(r'\s(P(([0-9.]+Y)?([0-9.]+M)?([0-9.]+W)?([0-9.]+D)?)?(T([0-9.]+H)?([0-9.]+M)?([0-9.]+S)?)?)$',
|
||||
' feelParseISODuration("\\1")'), ## Parse ISO Duration convert to timedelta end
|
||||
|
||||
(r'(.+)\[(\S+)?(<=)(.+)]\.(\S+)', 'feelFilter(\\1,"\\2","\\4","\\3","\\5")'), # implement a simple filter
|
||||
(r'(.+)\[(\S+)?(>=)(.+)]\.(\S+)', 'feelFilter(\\1,"\\2","\\4","\\3","\\5")'), # implement a simple filter
|
||||
(r'(.+)\[(\S+)?(!=)(.+)]\.(\S+)', 'feelFilter(\\1,"\\2","\\4","\\3","\\5")'), # implement a simple filter
|
||||
(r'(.+)\[(\S+)?([=<>])(.+)]\.(\S+)', 'feelFilter(\\1,"\\2",\\4,"\\3","\\5")'), # implement a simple filter
|
||||
(r'(.+)\[(\S+)?(<=)(.+)]', 'feelFilter(\\1,"\\2","\\4","\\3")'), # implement a simple filter
|
||||
(r'(.+)\[(\S+)?(>=)(.+)]', 'feelFilter(\\1,"\\2","\\4","\\3")'), # implement a simple filter
|
||||
(r'(.+)\[(\S+)?(!=)(.+)]', 'feelFilter(\\1,"\\2","\\4","\\3")'), # implement a simple filter
|
||||
(r'(.+)\[(\S+)?([=<>])(.+)]','feelFilter(\\1,"\\2","\\4","\\3")'), # implement a simple filter
|
||||
(r'[\]\(]([^\[\]\(\)]+?)[.]{2}([^\[\]\(\)]+?)[\[\)]',
|
||||
'FeelInterval(\\1,\\2,rightOpen=True,leftOpen=True)'), # open both
|
||||
|
||||
|
||||
# parse dot.dict for several different edge cases
|
||||
# make sure that it begins with a letter character - otherwise we
|
||||
# may get float numbers.
|
||||
# will not work for cases where we do something like:
|
||||
# x contains(this.dotdict.item)
|
||||
# and it may be difficult, because we do not want to replace for the case of
|
||||
# somedict.keys() - because that is actually in the tests.
|
||||
# however, it would be fixed by doing:
|
||||
# x contains( this.dotdict.item )
|
||||
|
||||
('true','True'),
|
||||
('false','False')
|
||||
]
|
||||
|
||||
externalFuncs = {
|
||||
'feelConvertTime':feelConvertTime,
|
||||
'FeelInterval':FeelInterval,
|
||||
'FeelNot':FeelNot,
|
||||
'Decimal':Decimal,
|
||||
'feelConcatenate': feelConcatenate,
|
||||
'feelAppend': feelAppend,
|
||||
'feelFilter': feelFilter,
|
||||
'feelNow': feelNow,
|
||||
'FeelContains': FeelContains,
|
||||
'datetime':datetime,
|
||||
'feelParseISODuration': feelParseISODuration,
|
||||
'feelGregorianDOW':feelGregorianDOW,
|
||||
}
|
||||
|
||||
|
||||
class FeelLikeScriptEngine(PythonScriptEngine):
|
||||
"""
|
||||
This should serve as a base for all scripting & expression evaluation
|
||||
operations that are done within both BPMN and BMN. Eventually it will also
|
||||
serve as a base for FEEL expressions as well
|
||||
|
||||
If you are uncomfortable with the use of eval() and exec, then you should
|
||||
provide a specialised subclass that parses and executes the scripts /
|
||||
expressions in a mini-language of your own.
|
||||
"""
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
def validate(self, expression):
|
||||
super().validate(self.patch_expression(expression))
|
||||
|
||||
def patch_expression(self, invalid_python, lhs=''):
|
||||
if invalid_python is None:
|
||||
return None
|
||||
proposed_python = invalid_python
|
||||
for transformation in fixes:
|
||||
if isinstance(transformation[1], str):
|
||||
proposed_python = re.sub(transformation[0], transformation[1], proposed_python)
|
||||
else:
|
||||
for x in re.findall(transformation[0], proposed_python):
|
||||
if '.' in(x):
|
||||
proposed_python = proposed_python.replace(x, transformation[1](x))
|
||||
if lhs is not None:
|
||||
proposed_python = lhs + proposed_python
|
||||
return proposed_python
|
||||
|
||||
def _evaluate(self, expression, context, task=None, external_methods=None):
|
||||
"""
|
||||
Evaluate the given expression, within the context of the given task and
|
||||
return the result.
|
||||
"""
|
||||
if external_methods is None:
|
||||
external_methods = {}
|
||||
|
||||
revised = self.patch_expression(expression)
|
||||
external_methods.update(externalFuncs)
|
||||
return super()._evaluate(revised, context, external_methods=external_methods)
|
||||
|
||||
def execute(self, task, script, data, external_methods=None):
|
||||
"""
|
||||
Execute the script, within the context of the specified task
|
||||
"""
|
||||
if external_methods is None:
|
||||
external_methods = {}
|
||||
external_methods.update(externalFuncs)
|
||||
super(PythonScriptEngine).execute(task, script, external_methods)
|
||||
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,232 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import ast
|
||||
import copy
|
||||
import sys
|
||||
import traceback
|
||||
import datetime
|
||||
|
||||
import dateparser
|
||||
import pytz
|
||||
|
||||
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
|
||||
from ..operators import Operator
|
||||
|
||||
# Would love to get rid of this altogether, as it rightly belongs in the
|
||||
# backend, but leaving it here because that's the path of least resistance.
|
||||
DEFAULT_GLOBALS = {
|
||||
'timedelta': datetime.timedelta,
|
||||
'datetime': datetime,
|
||||
'dateparser': dateparser,
|
||||
'pytz': pytz,
|
||||
}
|
||||
|
||||
|
||||
# Copyright (C) 2020 Kelly McDonald
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
|
||||
class Box(dict):
|
||||
"""
|
||||
Example:
|
||||
m = Box({'first_name': 'Eduardo'}, last_name='Pool', age=24, sports=['Soccer'])
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Box, self).__init__(*args, **kwargs)
|
||||
for arg in args:
|
||||
if isinstance(arg, dict):
|
||||
for k, v in arg.items():
|
||||
if isinstance(v, dict):
|
||||
self[k] = Box(v)
|
||||
else:
|
||||
self[k] = v
|
||||
|
||||
if kwargs:
|
||||
for k, v in kwargs.items():
|
||||
if isinstance(v, dict):
|
||||
self[k] = Box(v)
|
||||
else:
|
||||
self[k] = v
|
||||
|
||||
def __deepcopy__(self, memodict=None):
|
||||
if memodict is None:
|
||||
memodict = {}
|
||||
my_copy = Box()
|
||||
for k, v in self.items():
|
||||
my_copy[k] = copy.deepcopy(v)
|
||||
return my_copy
|
||||
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
output = self[attr]
|
||||
except:
|
||||
raise AttributeError(
|
||||
"Dictionary has no attribute '%s' " % str(attr))
|
||||
return output
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
self.__setitem__(key, value)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
super(Box, self).__setitem__(key, value)
|
||||
self.__dict__.update({key: value})
|
||||
|
||||
def __getstate__(self):
|
||||
return self.__dict__
|
||||
|
||||
def __setstate__(self, state):
|
||||
self.__init__(state)
|
||||
|
||||
def __delattr__(self, item):
|
||||
self.__delitem__(item)
|
||||
|
||||
def __delitem__(self, key):
|
||||
super(Box, self).__delitem__(key)
|
||||
del self.__dict__[key]
|
||||
|
||||
|
||||
class PythonScriptEngine(object):
|
||||
"""
|
||||
This should serve as a base for all scripting & expression evaluation
|
||||
operations that are done within both BPMN and BMN. Eventually it will also
|
||||
serve as a base for FEEL expressions as well
|
||||
|
||||
If you are uncomfortable with the use of eval() and exec, then you should
|
||||
provide a specialised subclass that parses and executes the scripts /
|
||||
expressions in a different way.
|
||||
"""
|
||||
|
||||
def __init__(self, default_globals=None, scripting_additions=None):
|
||||
|
||||
self.globals = default_globals or DEFAULT_GLOBALS
|
||||
self.globals.update(scripting_additions or {})
|
||||
self.error_tasks = {}
|
||||
|
||||
def validate(self, expression):
|
||||
ast.parse(expression)
|
||||
|
||||
def evaluate(self, task, expression, external_methods=None):
|
||||
"""
|
||||
Evaluate the given expression, within the context of the given task and
|
||||
return the result.
|
||||
"""
|
||||
try:
|
||||
if isinstance(expression, Operator):
|
||||
# I am assuming that this takes care of some kind of XML
|
||||
# expression judging from the contents of operators.py
|
||||
return expression._matches(task)
|
||||
else:
|
||||
return self._evaluate(expression, task.data, external_methods)
|
||||
except Exception as e:
|
||||
raise WorkflowTaskExecException(task,
|
||||
f"Error evaluating expression {expression}",
|
||||
e)
|
||||
|
||||
def execute(self, task, script, external_methods=None):
|
||||
"""
|
||||
Execute the script, within the context of the specified task
|
||||
"""
|
||||
try:
|
||||
self.check_for_overwrite(task, external_methods or {})
|
||||
self._execute(script, task.data, external_methods or {})
|
||||
except Exception as err:
|
||||
wte = self.create_task_exec_exception(task, err)
|
||||
self.error_tasks[task.id] = wte
|
||||
raise wte
|
||||
|
||||
def call_service(self, operation_name, operation_params, task_data):
|
||||
"""Override to control how external services are called from service
|
||||
tasks."""
|
||||
raise NotImplementedError("To call external services override the script engine and implement `call_service`.")
|
||||
|
||||
def create_task_exec_exception(self, task, err):
|
||||
|
||||
if isinstance(err, WorkflowTaskExecException):
|
||||
return err
|
||||
|
||||
detail = err.__class__.__name__
|
||||
if len(err.args) > 0:
|
||||
detail += ":" + err.args[0]
|
||||
line_number = 0
|
||||
error_line = ''
|
||||
cl, exc, tb = sys.exc_info()
|
||||
# Loop back through the stack trace to find the file called
|
||||
# 'string' - which is the script we are executing, then use that
|
||||
# to parse and pull out the offending line.
|
||||
for frame_summary in traceback.extract_tb(tb):
|
||||
if frame_summary.filename == '<string>':
|
||||
line_number = frame_summary.lineno
|
||||
error_line = task.task_spec.script.splitlines()[
|
||||
line_number - 1]
|
||||
return WorkflowTaskExecException(task, detail, err, line_number,
|
||||
error_line)
|
||||
|
||||
def check_for_overwrite(self, task, external_methods):
|
||||
"""It's possible that someone will define a variable with the
|
||||
same name as a pre-defined script, rending the script un-callable.
|
||||
This results in a nearly indecipherable error. Better to fail
|
||||
fast with a sensible error message."""
|
||||
func_overwrites = set(self.globals).intersection(task.data)
|
||||
func_overwrites.update(set(external_methods).intersection(task.data))
|
||||
if len(func_overwrites) > 0:
|
||||
msg = f"You have task data that overwrites a predefined " \
|
||||
f"function(s). Please change the following variable or " \
|
||||
f"field name(s) to something else: {func_overwrites}"
|
||||
raise WorkflowTaskExecException(task, msg)
|
||||
|
||||
def convert_to_box(self, data):
|
||||
if isinstance(data, dict):
|
||||
for key, value in data.items():
|
||||
if not isinstance(value, Box):
|
||||
data[key] = self.convert_to_box(value)
|
||||
return Box(data)
|
||||
if isinstance(data, list):
|
||||
for idx, value in enumerate(data):
|
||||
data[idx] = self.convert_to_box(value)
|
||||
return data
|
||||
return data
|
||||
|
||||
def _evaluate(self, expression, context, external_methods=None):
|
||||
|
||||
globals = copy.copy(self.globals) # else we pollute all later evals.
|
||||
self.convert_to_box(context)
|
||||
globals.update(external_methods or {})
|
||||
globals.update(context)
|
||||
return eval(expression, globals)
|
||||
|
||||
def _execute(self, script, context, external_methods=None):
|
||||
|
||||
my_globals = copy.copy(self.globals)
|
||||
self.convert_to_box(context)
|
||||
my_globals.update(external_methods or {})
|
||||
context.update(my_globals)
|
||||
try:
|
||||
exec(script, context)
|
||||
finally:
|
||||
self.remove_globals_and_functions_from_context(context,
|
||||
external_methods)
|
||||
|
||||
def remove_globals_and_functions_from_context(self, context,
|
||||
external_methods=None):
|
||||
"""When executing a script, don't leave the globals, functions
|
||||
and external methods in the context that we have modified."""
|
||||
for k in list(context):
|
||||
if k == "__builtins__" or \
|
||||
hasattr(context[k], '__call__') or \
|
||||
k in self.globals or \
|
||||
external_methods and k in external_methods:
|
||||
context.pop(k)
|
|
@ -0,0 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
|
@ -0,0 +1,62 @@
|
|||
import re
|
||||
|
||||
from SpiffWorkflow.exceptions import WorkflowException, WorkflowTaskException
|
||||
from SpiffWorkflow.util import levenshtein
|
||||
|
||||
class WorkflowTaskExecException(WorkflowTaskException):
|
||||
"""
|
||||
Exception during execution of task "payload". For example:
|
||||
|
||||
* ScriptTask during execution of embedded script,
|
||||
* ServiceTask during external service call.
|
||||
"""
|
||||
|
||||
def __init__(self, task, error_msg, exception=None, line_number=0, error_line=""):
|
||||
"""
|
||||
Exception initialization.
|
||||
|
||||
:param task: the task that threw the exception
|
||||
:type task: Task
|
||||
:param exception: a human readable error message
|
||||
:type exception: Exception
|
||||
|
||||
"""
|
||||
|
||||
self.offset = 0
|
||||
self.line_number = line_number
|
||||
self.error_line = error_line
|
||||
|
||||
if isinstance(exception, SyntaxError):
|
||||
# Prefer line number from syntax error if available.
|
||||
self.line_number = exception.lineno
|
||||
self.offset = exception.offset
|
||||
elif isinstance(exception, NameError):
|
||||
def_match = re.match("name '(.+)' is not defined", str(exception))
|
||||
if def_match:
|
||||
bad_variable = re.match("name '(.+)' is not defined", str(exception)).group(1)
|
||||
most_similar = levenshtein.most_similar(bad_variable, task.data.keys(), 3)
|
||||
error_msg = f'something you are referencing does not exist: ' \
|
||||
f'"{exception}".'
|
||||
if len(most_similar) == 1:
|
||||
error_msg += f' Did you mean \'{most_similar[0]}\'?'
|
||||
if len(most_similar) > 1:
|
||||
error_msg += f' Did you mean one of \'{most_similar}\'?'
|
||||
|
||||
else:
|
||||
error_msg = str(exception)
|
||||
super().__init__(task, error_msg, exception)
|
||||
|
||||
|
||||
class WorkflowDataException(WorkflowException):
|
||||
|
||||
def __init__(self, task, data_input=None, data_output=None, message=None):
|
||||
"""
|
||||
:param task: the task that generated the error
|
||||
:param data_input: the spec of the input variable (if a data input)
|
||||
:param data_output: the spec of the output variable (if a data output)
|
||||
"""
|
||||
super().__init__(task.task_spec, message or 'data object error')
|
||||
self.task = task
|
||||
self.data_input = data_input
|
||||
self.data_output = data_output
|
||||
self.task_trace = self.get_task_trace(task)
|
|
@ -0,0 +1,253 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import glob
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.events.event_definitions import NoneEventDefinition
|
||||
|
||||
from .ValidationException import ValidationException
|
||||
from ..specs.BpmnProcessSpec import BpmnProcessSpec
|
||||
from ..specs.events import StartEvent, EndEvent, BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
|
||||
from ..specs.events import SendTask, ReceiveTask
|
||||
from ..specs.SubWorkflowTask import CallActivity, SubWorkflowTask, TransactionSubprocess
|
||||
from ..specs.ExclusiveGateway import ExclusiveGateway
|
||||
from ..specs.InclusiveGateway import InclusiveGateway
|
||||
from ..specs.ManualTask import ManualTask
|
||||
from ..specs.NoneTask import NoneTask
|
||||
from ..specs.ParallelGateway import ParallelGateway
|
||||
from ..specs.ScriptTask import ScriptTask
|
||||
from ..specs.ServiceTask import ServiceTask
|
||||
from ..specs.UserTask import UserTask
|
||||
from .ProcessParser import ProcessParser
|
||||
from .util import full_tag, xpath_eval, first
|
||||
from .task_parsers import (UserTaskParser, NoneTaskParser, ManualTaskParser,
|
||||
ExclusiveGatewayParser, ParallelGatewayParser, InclusiveGatewayParser,
|
||||
CallActivityParser, ScriptTaskParser, SubWorkflowParser,
|
||||
ServiceTaskParser)
|
||||
from .event_parsers import (StartEventParser, EndEventParser, BoundaryEventParser,
|
||||
IntermediateCatchEventParser, IntermediateThrowEventParser,
|
||||
SendTaskParser, ReceiveTaskParser)
|
||||
|
||||
|
||||
class BpmnParser(object):
|
||||
"""
|
||||
The BpmnParser class is a pluggable base class that manages the parsing of
|
||||
a set of BPMN files. It is intended that this class will be overridden by an
|
||||
application that implements a BPMN engine.
|
||||
|
||||
Extension points: OVERRIDE_PARSER_CLASSES provides a map from full BPMN tag
|
||||
name to a TaskParser and Task class. PROCESS_PARSER_CLASS provides a
|
||||
subclass of ProcessParser
|
||||
"""
|
||||
|
||||
PARSER_CLASSES = {
|
||||
full_tag('startEvent'): (StartEventParser, StartEvent),
|
||||
full_tag('endEvent'): (EndEventParser, EndEvent),
|
||||
full_tag('userTask'): (UserTaskParser, UserTask),
|
||||
full_tag('task'): (NoneTaskParser, NoneTask),
|
||||
full_tag('subProcess'): (SubWorkflowParser, CallActivity),
|
||||
full_tag('manualTask'): (ManualTaskParser, ManualTask),
|
||||
full_tag('exclusiveGateway'): (ExclusiveGatewayParser, ExclusiveGateway),
|
||||
full_tag('parallelGateway'): (ParallelGatewayParser, ParallelGateway),
|
||||
full_tag('inclusiveGateway'): (InclusiveGatewayParser, InclusiveGateway),
|
||||
full_tag('callActivity'): (CallActivityParser, CallActivity),
|
||||
full_tag('transaction'): (SubWorkflowParser, TransactionSubprocess),
|
||||
full_tag('scriptTask'): (ScriptTaskParser, ScriptTask),
|
||||
full_tag('serviceTask'): (ServiceTaskParser, ServiceTask),
|
||||
full_tag('intermediateCatchEvent'): (IntermediateCatchEventParser, IntermediateCatchEvent),
|
||||
full_tag('intermediateThrowEvent'): (IntermediateThrowEventParser, IntermediateThrowEvent),
|
||||
full_tag('boundaryEvent'): (BoundaryEventParser, BoundaryEvent),
|
||||
full_tag('receiveTask'): (ReceiveTaskParser, ReceiveTask),
|
||||
full_tag('sendTask'): (SendTaskParser, SendTask),
|
||||
}
|
||||
|
||||
OVERRIDE_PARSER_CLASSES = {}
|
||||
|
||||
PROCESS_PARSER_CLASS = ProcessParser
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
Constructor.
|
||||
"""
|
||||
self.process_parsers = {}
|
||||
self.process_parsers_by_name = {}
|
||||
self.collaborations = {}
|
||||
self.process_dependencies = set()
|
||||
self.dmn_dependencies = set()
|
||||
|
||||
def _get_parser_class(self, tag):
|
||||
if tag in self.OVERRIDE_PARSER_CLASSES:
|
||||
return self.OVERRIDE_PARSER_CLASSES[tag]
|
||||
elif tag in self.PARSER_CLASSES:
|
||||
return self.PARSER_CLASSES[tag]
|
||||
return None, None
|
||||
|
||||
def get_process_parser(self, process_id_or_name):
|
||||
"""
|
||||
Returns the ProcessParser for the given process ID or name. It matches
|
||||
by name first.
|
||||
"""
|
||||
if process_id_or_name in self.process_parsers_by_name:
|
||||
return self.process_parsers_by_name[process_id_or_name]
|
||||
elif process_id_or_name in self.process_parsers:
|
||||
return self.process_parsers[process_id_or_name]
|
||||
|
||||
def get_process_ids(self):
|
||||
"""Returns a list of process IDs"""
|
||||
return list(self.process_parsers.keys())
|
||||
|
||||
def add_bpmn_file(self, filename):
|
||||
"""
|
||||
Add the given BPMN filename to the parser's set.
|
||||
"""
|
||||
self.add_bpmn_files([filename])
|
||||
|
||||
def add_bpmn_files_by_glob(self, g):
|
||||
"""
|
||||
Add all filenames matching the provided pattern (e.g. *.bpmn) to the
|
||||
parser's set.
|
||||
"""
|
||||
self.add_bpmn_files(glob.glob(g))
|
||||
|
||||
def add_bpmn_files(self, filenames):
|
||||
"""
|
||||
Add all filenames in the given list to the parser's set.
|
||||
"""
|
||||
for filename in filenames:
|
||||
f = open(filename, 'r')
|
||||
try:
|
||||
self.add_bpmn_xml(etree.parse(f), filename=filename)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
def add_bpmn_xml(self, bpmn, filename=None):
|
||||
"""
|
||||
Add the given lxml representation of the BPMN file to the parser's set.
|
||||
|
||||
:param bpmn: the lxml-parsed BPMN document
|
||||
:param filename: Optionally, provide the source filename.
|
||||
"""
|
||||
xpath = xpath_eval(bpmn)
|
||||
# do a check on our bpmn to ensure that no id appears twice
|
||||
# this *should* be taken care of by our modeler - so this test
|
||||
# should never fail.
|
||||
ids = [x for x in xpath('.//bpmn:*[@id]')]
|
||||
foundids = {}
|
||||
for node in ids:
|
||||
id = node.get('id')
|
||||
if foundids.get(id,None) is not None:
|
||||
raise ValidationException(
|
||||
'The bpmn document should have no repeating ids but (%s) repeats'%id,
|
||||
node=node,
|
||||
filename=filename)
|
||||
else:
|
||||
foundids[id] = 1
|
||||
|
||||
for process in xpath('.//bpmn:process'):
|
||||
self.create_parser(process, xpath, filename)
|
||||
|
||||
self._find_dependencies(xpath)
|
||||
|
||||
collaboration = first(xpath('.//bpmn:collaboration'))
|
||||
if collaboration is not None:
|
||||
collaboration_xpath = xpath_eval(collaboration)
|
||||
name = collaboration.get('id')
|
||||
self.collaborations[name] = [ participant.get('processRef') for participant in collaboration_xpath('.//bpmn:participant') ]
|
||||
|
||||
def _find_dependencies(self, xpath):
|
||||
"""Locate all calls to external BPMN and DMN files, and store their
|
||||
ids in our list of dependencies"""
|
||||
for call_activity in xpath('.//bpmn:callActivity'):
|
||||
self.process_dependencies.add(call_activity.get('calledElement'))
|
||||
parser_cls, cls = self._get_parser_class(full_tag('businessRuleTask'))
|
||||
if parser_cls:
|
||||
for business_rule in xpath('.//bpmn:businessRuleTask'):
|
||||
self.dmn_dependencies.add(parser_cls.get_decision_ref(business_rule))
|
||||
|
||||
|
||||
def create_parser(self, node, doc_xpath, filename=None, lane=None):
|
||||
parser = self.PROCESS_PARSER_CLASS(self, node, filename=filename, doc_xpath=doc_xpath, lane=lane)
|
||||
if parser.get_id() in self.process_parsers:
|
||||
raise ValidationException('Duplicate process ID', node=node, filename=filename)
|
||||
if parser.get_name() in self.process_parsers_by_name:
|
||||
raise ValidationException('Duplicate process name', node=node, filename=filename)
|
||||
self.process_parsers[parser.get_id()] = parser
|
||||
self.process_parsers_by_name[parser.get_name()] = parser
|
||||
|
||||
def get_dependencies(self):
|
||||
return self.process_dependencies.union(self.dmn_dependencies)
|
||||
|
||||
def get_process_dependencies(self):
|
||||
return self.process_dependencies
|
||||
|
||||
def get_dmn_dependencies(self):
|
||||
return self.dmn_dependencies
|
||||
|
||||
def get_spec(self, process_id_or_name):
|
||||
"""
|
||||
Parses the required subset of the BPMN files, in order to provide an
|
||||
instance of BpmnProcessSpec (i.e. WorkflowSpec)
|
||||
for the given process ID or name. The name is matched first.
|
||||
"""
|
||||
parser = self.get_process_parser(process_id_or_name)
|
||||
if parser is None:
|
||||
raise ValidationException(
|
||||
f"The process '{process_id_or_name}' was not found. "
|
||||
f"Did you mean one of the following: "
|
||||
f"{', '.join(self.get_process_ids())}?")
|
||||
return parser.get_spec()
|
||||
|
||||
def get_subprocess_specs(self, name, specs=None):
|
||||
used = specs or {}
|
||||
wf_spec = self.get_spec(name)
|
||||
for task_spec in wf_spec.task_specs.values():
|
||||
if isinstance(task_spec, SubWorkflowTask) and task_spec.spec not in used:
|
||||
used[task_spec.spec] = self.get_spec(task_spec.spec)
|
||||
self.get_subprocess_specs(task_spec.spec, used)
|
||||
return used
|
||||
|
||||
def find_all_specs(self):
|
||||
# This is a little convoluted, but we might add more processes as we generate
|
||||
# the dictionary if something refers to another subprocess that we haven't seen.
|
||||
processes = dict((id, self.get_spec(id)) for id in self.get_process_ids())
|
||||
while processes.keys() != self.process_parsers.keys():
|
||||
for process_id in self.process_parsers.keys():
|
||||
processes[process_id] = self.get_spec(process_id)
|
||||
return processes
|
||||
|
||||
def get_collaboration(self, name):
|
||||
self.find_all_specs()
|
||||
spec = BpmnProcessSpec(name)
|
||||
subprocesses = {}
|
||||
start = StartEvent(spec, 'Start Collaboration', NoneEventDefinition())
|
||||
spec.start.connect(start)
|
||||
end = EndEvent(spec, 'End Collaboration', NoneEventDefinition())
|
||||
end.connect(spec.end)
|
||||
for process in self.collaborations[name]:
|
||||
process_parser = self.get_process_parser(process)
|
||||
if process_parser and process_parser.process_executable:
|
||||
participant = CallActivity(spec, process, process)
|
||||
start.connect(participant)
|
||||
participant.connect(end)
|
||||
subprocesses[process] = self.get_spec(process)
|
||||
subprocesses.update(self.get_subprocess_specs(process))
|
||||
return spec, subprocesses
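

# --- Usage sketch (illustrative addition, not part of the upstream module). ---
# The file pattern and process id below are hypothetical; this only shows how
# the public BpmnParser API above is typically driven.
def _example_parser_usage():  # pragma: no cover
    parser = BpmnParser()
    # Load every .bpmn file in the working directory, then build the spec
    # for a process whose id (or name) is 'my_process'.
    parser.add_bpmn_files_by_glob('*.bpmn')
    spec = parser.get_spec('my_process')
    subprocess_specs = parser.get_subprocess_specs('my_process')
    return spec, subprocess_specs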
|
|
@ -0,0 +1,118 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .ValidationException import ValidationException
|
||||
from ..specs.BpmnProcessSpec import BpmnProcessSpec, BpmnDataSpecification
|
||||
from .node_parser import NodeParser
|
||||
from .util import first
|
||||
|
||||
|
||||
class ProcessParser(NodeParser):
|
||||
"""
|
||||
Parses a single BPMN process, including all of the tasks within that
|
||||
process.
|
||||
"""
|
||||
|
||||
def __init__(self, p, node, filename=None, doc_xpath=None, lane=None):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param p: the owning BpmnParser instance
|
||||
:param node: the XML node for the process
|
||||
:param filename: the source BPMN filename (optional)
|
||||
:param doc_xpath: an xpath evaluator for the document (optional)
|
||||
:param lane: the lane of a subprocess (optional)
|
||||
"""
|
||||
super().__init__(node, filename, doc_xpath, lane)
|
||||
self.parser = p
|
||||
self.parsed_nodes = {}
|
||||
self.lane = lane
|
||||
self.spec = None
|
||||
self.process_executable = True
|
||||
|
||||
def get_name(self):
|
||||
"""
|
||||
Returns the process name (or ID, if no name is included in the file)
|
||||
"""
|
||||
return self.node.get('name', default=self.get_id())
|
||||
|
||||
def parse_node(self, node):
|
||||
"""
|
||||
Parses the specified child task node, and returns the task spec. This
|
||||
can be called by a TaskParser instance that is owned by this
|
||||
ProcessParser.
|
||||
"""
|
||||
|
||||
if node.get('id') in self.parsed_nodes:
|
||||
return self.parsed_nodes[node.get('id')]
|
||||
|
||||
(node_parser, spec_class) = self.parser._get_parser_class(node.tag)
|
||||
if not node_parser or not spec_class:
|
||||
raise ValidationException("There is no support implemented for this task type.",
|
||||
node=node, filename=self.filename)
|
||||
np = node_parser(self, spec_class, node, self.lane)
|
||||
task_spec = np.parse_node()
|
||||
return task_spec
|
||||
|
||||
def _parse(self):
|
||||
# Here we only look in the top level; we will have another
|
||||
# bpmn:startEvent if we have a subworkflow task
|
||||
self.process_executable = self.node.get('isExecutable', 'true') == 'true'
|
||||
start_node_list = self.xpath('./bpmn:startEvent')
|
||||
if not start_node_list and self.process_executable:
|
||||
raise ValidationException("No start event found", node=self.node, filename=self.filename)
|
||||
self.spec = BpmnProcessSpec(name=self.get_id(), description=self.get_name(), filename=self.filename)
|
||||
|
||||
# Check for an IO Specification.
|
||||
io_spec = first(self.xpath('./bpmn:ioSpecification'))
|
||||
if io_spec is not None:
|
||||
data_parser = DataSpecificationParser(io_spec, self.filename, self.doc_xpath)
|
||||
self.spec.data_inputs, self.spec.data_outputs = data_parser.parse_io_spec()
|
||||
|
||||
# Get the data objects
|
||||
for obj in self.xpath('./bpmn:dataObject'):
|
||||
data_parser = DataSpecificationParser(obj, self.filename, self.doc_xpath)
|
||||
data_object = data_parser.parse_data_object()
|
||||
self.spec.data_objects[data_object.name] = data_object
|
||||
|
||||
for node in start_node_list:
|
||||
self.parse_node(node)
|
||||
|
||||
def get_spec(self):
|
||||
"""
|
||||
Parse this process (if it has not already been parsed), and return the
|
||||
workflow spec.
|
||||
"""
|
||||
if self.spec is None:
|
||||
self._parse()
|
||||
return self.spec
|
||||
|
||||
|
||||
class DataSpecificationParser(NodeParser):
|
||||
|
||||
def parse_io_spec(self):
|
||||
inputs, outputs = [], []
|
||||
for elem in self.xpath('./bpmn:dataInput'):
|
||||
inputs.append(BpmnDataSpecification(elem.get('id'), elem.get('name')))
|
||||
for elem in self.xpath('./bpmn:dataOutput'):
|
||||
outputs.append(BpmnDataSpecification(elem.get('id'), elem.get('name')))
|
||||
return inputs, outputs
|
||||
|
||||
def parse_data_object(self):
|
||||
return BpmnDataSpecification(self.node.get('id'), self.node.get('name'))
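

# Illustrative sketch (not part of the upstream file): ProcessParser objects are
# normally created by BpmnParser.create_parser(); get_spec() parses lazily on
# first access. 'process_id' is a hypothetical identifier.
def _example_get_process_spec(bpmn_parser, process_id):  # pragma: no cover
    process_parser = bpmn_parser.get_process_parser(process_id)
    return process_parser.get_spec() if process_parser is not None else None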
|
|
@ -0,0 +1,248 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import sys
|
||||
import traceback
|
||||
from .ValidationException import ValidationException
|
||||
from ..specs.NoneTask import NoneTask
|
||||
from ..specs.ScriptTask import ScriptTask
|
||||
from ..specs.UserTask import UserTask
|
||||
from ..specs.events import _BoundaryEventParent, CancelEventDefinition
|
||||
from ..specs.MultiInstanceTask import getDynamicMIClass
|
||||
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess
|
||||
from ..specs.ExclusiveGateway import ExclusiveGateway
|
||||
from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
from ...operators import Attrib, PathAttrib
|
||||
from .util import one, first
|
||||
from .node_parser import NodeParser
|
||||
from ...specs.SubWorkflow import SubWorkflow
|
||||
|
||||
STANDARDLOOPCOUNT = '25'
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
|
||||
class TaskParser(NodeParser):
|
||||
"""
|
||||
This class parses a single BPMN task node, and returns the Task Spec for
|
||||
that node.
|
||||
|
||||
It also results in the recursive parsing of connected tasks, connecting all
|
||||
outgoing transitions, once the child tasks have all been parsed.
|
||||
"""
|
||||
|
||||
def __init__(self, process_parser, spec_class, node, lane=None):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param process_parser: the owning process parser instance
|
||||
:param spec_class: the type of spec that should be created. This allows
|
||||
a subclass of BpmnParser to provide a specialised spec class, without
|
||||
extending the TaskParser.
|
||||
:param node: the XML node for this task
|
||||
"""
|
||||
super().__init__(node, process_parser.filename, process_parser.doc_xpath, lane)
|
||||
self.process_parser = process_parser
|
||||
self.spec_class = spec_class
|
||||
self.spec = self.process_parser.spec
|
||||
|
||||
def _set_multiinstance_attributes(self, is_sequential, expanded, loop_count,
|
||||
loop_task=False, element_var=None, collection=None, completion_condition=None):
|
||||
# This should be replaced with its own task parser (though I'm not sure how feasible this is given
|
||||
# the current parser architecture). We should also consider separate classes for loop vs
|
||||
# multiinstance because having all these optional attributes is a nightmare
|
||||
|
||||
if not isinstance(self.task, (NoneTask,UserTask,BusinessRuleTask,ScriptTask,CallActivity,SubWorkflow)):
|
||||
raise ValidationException(
|
||||
f'Unsupported MultiInstance Task: {self.task.__class__}',
|
||||
node=self.node,
|
||||
filename=self.filename)
|
||||
|
||||
self.task.loopTask = loop_task
|
||||
self.task.isSequential = is_sequential
|
||||
self.task.expanded = expanded
|
||||
# make dot notation compatible with bpmn path notation.
|
||||
self.task.times = PathAttrib(loop_count.replace('.', '/')) if loop_count.find('.') > 0 else Attrib(loop_count)
|
||||
self.task.elementVar = element_var
|
||||
self.task.collection = collection
|
||||
self.task.completioncondition = completion_condition
|
||||
|
||||
self.task.prevtaskclass = self.task.__module__ + "." + self.task.__class__.__name__
|
||||
newtaskclass = getDynamicMIClass(self.get_id(),self.task.__class__)
|
||||
self.task.__class__ = newtaskclass
|
||||
|
||||
def _detect_multiinstance(self):
|
||||
|
||||
multiinstance_element = first(self.xpath('./bpmn:multiInstanceLoopCharacteristics'))
|
||||
if multiinstance_element is not None:
|
||||
is_sequential = multiinstance_element.get('isSequential') == 'true'
|
||||
|
||||
element_var_text = multiinstance_element.attrib.get('{' + CAMUNDA_MODEL_NS + '}elementVariable')
|
||||
collection_text = multiinstance_element.attrib.get('{' + CAMUNDA_MODEL_NS + '}collection')
|
||||
|
||||
loop_cardinality = first(self.xpath('./bpmn:multiInstanceLoopCharacteristics/bpmn:loopCardinality'))
|
||||
if loop_cardinality is not None:
|
||||
loop_count = loop_cardinality.text
|
||||
elif collection_text is not None:
|
||||
loop_count = collection_text
|
||||
else:
|
||||
loop_count = '1'
|
||||
|
||||
if collection_text is not None:
|
||||
collection = PathAttrib(collection_text.replace('.', '/')) if collection_text.find('.') > 0 else Attrib(collection_text)
|
||||
else:
|
||||
collection = None
|
||||
|
||||
completion_condition = first(self.xpath('./bpmn:multiInstanceLoopCharacteristics/bpmn:completionCondition'))
|
||||
if completion_condition is not None:
|
||||
completion_condition = completion_condition.text
|
||||
|
||||
self._set_multiinstance_attributes(is_sequential, 1, loop_count,
|
||||
element_var=element_var_text,
|
||||
collection=collection,
|
||||
completion_condition=completion_condition)
|
||||
|
||||
elif len(self.xpath('./bpmn:standardLoopCharacteristics')) > 0:
|
||||
self._set_multiinstance_attributes(True, 25, STANDARDLOOPCOUNT, loop_task=True)
|
||||
|
||||
def parse_node(self):
|
||||
"""
|
||||
Parse this node, and all children, returning the connected task spec.
|
||||
"""
|
||||
try:
|
||||
self.task = self.create_task()
|
||||
# Why do we just set random attributes willy nilly everywhere in the code????
|
||||
# And we still pass around a gigantic kwargs dict whenever we create anything!
|
||||
self.task.extensions = self.parse_extensions()
|
||||
self.task.documentation = self.parse_documentation()
|
||||
# And now I have to add more of the same crappy thing.
|
||||
self.task.data_input_associations = self.parse_incoming_data_references()
|
||||
self.task.data_output_associations = self.parse_outgoing_data_references()
|
||||
|
||||
self._detect_multiinstance()
|
||||
|
||||
boundary_event_nodes = self.doc_xpath('.//bpmn:boundaryEvent[@attachedToRef="%s"]' % self.get_id())
|
||||
if boundary_event_nodes:
|
||||
parent_task = _BoundaryEventParent(
|
||||
self.spec, '%s.BoundaryEventParent' % self.get_id(),
|
||||
self.task, lane=self.task.lane)
|
||||
self.process_parser.parsed_nodes[
|
||||
self.node.get('id')] = parent_task
|
||||
parent_task.connect_outgoing(
|
||||
self.task, '%s.FromBoundaryEventParent' % self.get_id(),
|
||||
None, None)
|
||||
for boundary_event in boundary_event_nodes:
|
||||
b = self.process_parser.parse_node(boundary_event)
|
||||
if isinstance(b.event_definition, CancelEventDefinition) \
|
||||
and not isinstance(self.task, TransactionSubprocess):
|
||||
raise ValidationException(
|
||||
'Cancel Events may only be used with transactions',
|
||||
node=self.node,
|
||||
filename=self.filename)
|
||||
parent_task.connect_outgoing(
|
||||
b,
|
||||
'%s.FromBoundaryEventParent' % boundary_event.get(
|
||||
'id'),
|
||||
None, None)
|
||||
else:
|
||||
self.process_parser.parsed_nodes[
|
||||
self.node.get('id')] = self.task
|
||||
|
||||
children = []
|
||||
outgoing = self.doc_xpath('.//bpmn:sequenceFlow[@sourceRef="%s"]' % self.get_id())
|
||||
if len(outgoing) > 1 and not self.handles_multiple_outgoing():
|
||||
raise ValidationException(
|
||||
'Multiple outgoing flows are not supported for '
|
||||
'tasks of this type',
|
||||
node=self.node,
|
||||
filename=self.filename)
|
||||
for sequence_flow in outgoing:
|
||||
target_ref = sequence_flow.get('targetRef')
|
||||
try:
|
||||
target_node = one(self.doc_xpath('.//bpmn:*[@id="%s"]'% target_ref))
|
||||
except Exception:
|
||||
raise ValidationException(
|
||||
'When looking for a task spec, we found two items, '
|
||||
'perhaps a form has the same ID? (%s)' % target_ref,
|
||||
node=self.node,
|
||||
filename=self.filename)
|
||||
|
||||
c = self.process_parser.parse_node(target_node)
|
||||
position = c.position
|
||||
children.append((position, c, target_node, sequence_flow))
|
||||
|
||||
if children:
|
||||
# Sort children by their y coordinate.
|
||||
children = sorted(children, key=lambda tup: float(tup[0]["y"]))
|
||||
|
||||
default_outgoing = self.node.get('default')
|
||||
if not default_outgoing:
|
||||
if len(children) == 1 or not isinstance(self.task, ExclusiveGateway):
|
||||
(position, c, target_node, sequence_flow) = children[0]
|
||||
default_outgoing = sequence_flow.get('id')
|
||||
|
||||
for (position, c, target_node, sequence_flow) in children:
|
||||
self.connect_outgoing(
|
||||
c, target_node, sequence_flow,
|
||||
sequence_flow.get('id') == default_outgoing)
|
||||
|
||||
return parent_task if boundary_event_nodes else self.task
|
||||
except ValidationException:
|
||||
raise
|
||||
except Exception as ex:
|
||||
exc_info = sys.exc_info()
|
||||
tb = "".join(traceback.format_exception(
|
||||
exc_info[0], exc_info[1], exc_info[2]))
|
||||
raise ValidationException("%r" % (ex), node=self.node, filename=self.filename)
|
||||
|
||||
def get_task_spec_name(self, target_ref=None):
|
||||
"""
|
||||
Returns a unique task spec name for this task (or the targeted one)
|
||||
"""
|
||||
return target_ref or self.get_id()
|
||||
|
||||
def create_task(self):
|
||||
"""
|
||||
Create an instance of the task appropriately. A subclass can override
|
||||
this method to get extra information from the node.
|
||||
"""
|
||||
return self.spec_class(self.spec, self.get_task_spec_name(),
|
||||
lane=self.lane,
|
||||
description=self.node.get('name', None),
|
||||
position=self.position)
|
||||
|
||||
def connect_outgoing(self, outgoing_task, outgoing_task_node,
|
||||
sequence_flow_node, is_default):
|
||||
"""
|
||||
Connects this task to the indicated outgoing task, with the details in
|
||||
the sequence flow. A subclass can override this method to get extra
|
||||
information from the node.
|
||||
"""
|
||||
self.task.connect_outgoing(
|
||||
outgoing_task, sequence_flow_node.get('id'),
|
||||
sequence_flow_node.get(
|
||||
'name', None),
|
||||
self.parse_documentation(sequence_flow_node))
|
||||
|
||||
def handles_multiple_outgoing(self):
|
||||
"""
|
||||
A subclass should override this method if the task supports multiple
|
||||
outgoing sequence flows.
|
||||
"""
|
||||
return False
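

# Illustrative subclass sketch (hypothetical, not part of the upstream file):
# create_task() and connect_outgoing() are the documented override points; this
# example pulls an extra, made-up 'priority' attribute off the XML node.
class ExampleTaskParser(TaskParser):  # pragma: no cover
    def create_task(self):
        task = super().create_task()
        # 'priority' is an illustrative extension attribute, not a BPMN standard one.
        task.priority = self.node.get('priority', None)
        return task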
|
|
@ -0,0 +1,54 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .util import BPMN_MODEL_NS
|
||||
|
||||
|
||||
class ValidationException(Exception):
|
||||
|
||||
"""
|
||||
A ValidationException should be thrown with enough information for the user
|
||||
to diagnose the problem and sort it out.
|
||||
|
||||
If available, please provide the offending XML node and filename.
|
||||
"""
|
||||
|
||||
def __init__(self, msg, node=None, filename=None, *args, **kwargs):
|
||||
if node is not None:
|
||||
self.tag = self._shorten_tag(node.tag)
|
||||
self.id = node.get('id', '<Unknown>')
|
||||
self.name = node.get('name', '<Unknown>')
|
||||
self.sourceline = getattr(node, 'sourceline', '<Unknown>')
|
||||
else:
|
||||
self.tag = '<Unknown>'
|
||||
self.id = '<Unknown>'
|
||||
self.name = '<Unknown>'
|
||||
self.sourceline = '<Unknown>'
|
||||
self.filename = filename or '<Unknown File>'
|
||||
message = ('%s\nSource Details: '
|
||||
'%s (id:%s), name \'%s\', line %s in %s') % (
|
||||
msg, self.tag, self.id, self.name, self.sourceline, self.filename)
|
||||
|
||||
super(ValidationException, self).__init__(message, *args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def _shorten_tag(cls, tag):
|
||||
prefix = '{%s}' % BPMN_MODEL_NS
|
||||
if tag.startswith(prefix):
|
||||
return 'bpmn:' + tag[len(prefix):]
|
||||
return tag
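

# Illustrative sketch (not part of the upstream file): raise with the offending
# node and source filename so the message carries tag, id, name and line number.
def _example_validation_error(node, filename):  # pragma: no cover
    raise ValidationException('Unsupported element', node=node, filename=filename)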
|
|
@ -0,0 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
|
@ -0,0 +1,231 @@
|
|||
from lxml import etree
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.events.event_definitions import CorrelationProperty
|
||||
|
||||
from .ValidationException import ValidationException
|
||||
from .TaskParser import TaskParser
|
||||
from .util import first, one
|
||||
from ..specs.events import (TimerEventDefinition, MessageEventDefinition,
|
||||
ErrorEventDefinition, EscalationEventDefinition,
|
||||
SignalEventDefinition,
|
||||
CancelEventDefinition, CycleTimerEventDefinition,
|
||||
TerminateEventDefinition, NoneEventDefinition)
|
||||
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
CANCEL_EVENT_XPATH = './/bpmn:cancelEventDefinition'
|
||||
ERROR_EVENT_XPATH = './/bpmn:errorEventDefinition'
|
||||
ESCALATION_EVENT_XPATH = './/bpmn:escalationEventDefinition'
|
||||
TERMINATION_EVENT_XPATH = './/bpmn:terminateEventDefinition'
|
||||
MESSAGE_EVENT_XPATH = './/bpmn:messageEventDefinition'
|
||||
SIGNAL_EVENT_XPATH = './/bpmn:signalEventDefinition'
|
||||
TIMER_EVENT_XPATH = './/bpmn:timerEventDefinition'
|
||||
|
||||
class EventDefinitionParser(TaskParser):
|
||||
"""This class provvides methods for parsing different event definitions."""
|
||||
|
||||
def parse_cancel_event(self):
|
||||
return CancelEventDefinition()
|
||||
|
||||
def parse_error_event(self, error_event):
|
||||
"""Parse the errorEventDefinition node and return an instance of ErrorEventDefinition."""
|
||||
error_ref = error_event.get('errorRef')
|
||||
if error_ref:
|
||||
error = one(self.doc_xpath('.//bpmn:error[@id="%s"]' % error_ref))
|
||||
error_code = error.get('errorCode')
|
||||
name = error.get('name')
|
||||
else:
|
||||
name, error_code = 'None Error Event', None
|
||||
return ErrorEventDefinition(name, error_code)
|
||||
|
||||
def parse_escalation_event(self, escalation_event):
|
||||
"""Parse the escalationEventDefinition node and return an instance of EscalationEventDefinition."""
|
||||
|
||||
escalation_ref = escalation_event.get('escalationRef')
|
||||
if escalation_ref:
|
||||
escalation = one(self.doc_xpath('.//bpmn:escalation[@id="%s"]' % escalation_ref))
|
||||
escalation_code = escalation.get('escalationCode')
|
||||
name = escalation.get('name')
|
||||
else:
|
||||
name, escalation_code = 'None Escalation Event', None
|
||||
return EscalationEventDefinition(name, escalation_code)
|
||||
|
||||
def parse_message_event(self, message_event):
|
||||
|
||||
message_ref = message_event.get('messageRef')
|
||||
if message_ref is not None:
|
||||
message = one(self.doc_xpath('.//bpmn:message[@id="%s"]' % message_ref))
|
||||
name = message.get('name')
|
||||
correlations = self.get_message_correlations(message_ref)
|
||||
else:
|
||||
name = message_event.getparent().get('name')
|
||||
correlations = {}
|
||||
return MessageEventDefinition(name, correlations)
|
||||
|
||||
def parse_signal_event(self, signal_event):
|
||||
"""Parse the signalEventDefinition node and return an instance of SignalEventDefinition."""
|
||||
|
||||
signal_ref = signal_event.get('signalRef')
|
||||
if signal_ref:
|
||||
signal = one(self.doc_xpath('.//bpmn:signal[@id="%s"]' % signal_ref))
|
||||
name = signal.get('name')
|
||||
else:
|
||||
name = signal_event.getparent().get('name')
|
||||
return SignalEventDefinition(name)
|
||||
|
||||
def parse_terminate_event(self):
|
||||
"""Parse the terminateEventDefinition node and return an instance of TerminateEventDefinition."""
|
||||
return TerminateEventDefinition()
|
||||
|
||||
def parse_timer_event(self):
|
||||
"""Parse the timerEventDefinition node and return an instance of TimerEventDefinition."""
|
||||
|
||||
try:
|
||||
time_date = first(self.xpath('.//bpmn:timeDate'))
|
||||
if time_date is not None:
|
||||
return TimerEventDefinition(self.node.get('name'), time_date.text)
|
||||
|
||||
time_duration = first(self.xpath('.//bpmn:timeDuration'))
|
||||
if time_duration is not None:
|
||||
return TimerEventDefinition(self.node.get('name'), time_duration.text)
|
||||
|
||||
time_cycle = first(self.xpath('.//bpmn:timeCycle'))
|
||||
if time_cycle is not None:
|
||||
return CycleTimerEventDefinition(self.node.get('name'), time_cycle.text)
|
||||
raise ValidationException("Unknown Time Specification", node=self.node, filename=self.filename)
|
||||
except Exception as e:
|
||||
raise ValidationException("Time Specification Error. " + str(e), node=self.node, filename=self.filename)
|
||||
|
||||
def get_message_correlations(self, message_ref):
|
||||
|
||||
correlations = []
|
||||
for correlation in self.doc_xpath(f".//bpmn:correlationPropertyRetrievalExpression[@messageRef='{message_ref}']"):
|
||||
key = correlation.getparent().get('id')
|
||||
children = correlation.getchildren()
|
||||
expression = children[0].text if len(children) > 0 else None
|
||||
used_by = [ e.getparent().get('name') for e in
|
||||
self.doc_xpath(f".//bpmn:correlationKey/bpmn:correlationPropertyRef[text()='{key}']") ]
|
||||
if key is not None and expression is not None:
|
||||
correlations.append(CorrelationProperty(key, expression, used_by))
|
||||
return correlations
|
||||
|
||||
def _create_task(self, event_definition, cancel_activity=None):
|
||||
|
||||
if isinstance(event_definition, MessageEventDefinition):
|
||||
for prop in event_definition.correlation_properties:
|
||||
for key in prop.correlation_keys:
|
||||
if key not in self.spec.correlation_keys:
|
||||
self.spec.correlation_keys[key] = []
|
||||
if prop.name not in self.spec.correlation_keys[key]:
|
||||
self.spec.correlation_keys[key].append(prop.name)
|
||||
|
||||
kwargs = {
|
||||
'lane': self.lane,
|
||||
'description': self.node.get('name', None),
|
||||
'position': self.position,
|
||||
}
|
||||
if cancel_activity is not None:
|
||||
kwargs['cancel_activity'] = cancel_activity
|
||||
return self.spec_class(self.spec, self.get_task_spec_name(), event_definition, **kwargs)
|
||||
|
||||
def get_event_definition(self, xpaths):
|
||||
"""Returns the first event definition it can find in given list of xpaths"""
|
||||
for path in xpaths:
|
||||
event = first(self.xpath(path))
|
||||
if event is not None:
|
||||
if path == MESSAGE_EVENT_XPATH:
|
||||
return self.parse_message_event(event)
|
||||
elif path == SIGNAL_EVENT_XPATH:
|
||||
return self.parse_signal_event(event)
|
||||
elif path == TIMER_EVENT_XPATH:
|
||||
return self.parse_timer_event()
|
||||
elif path == CANCEL_EVENT_XPATH:
|
||||
return self.parse_cancel_event()
|
||||
elif path == ERROR_EVENT_XPATH:
|
||||
return self.parse_error_event(event)
|
||||
elif path == ESCALATION_EVENT_XPATH:
|
||||
return self.parse_escalation_event(event)
|
||||
elif path == TERMINATION_EVENT_XPATH:
|
||||
return self.parse_terminate_event()
|
||||
return NoneEventDefinition()
|
||||
|
||||
class StartEventParser(EventDefinitionParser):
|
||||
"""Parses a Start Event, and connects it to the internal spec.start task.
|
||||
Supports Message, Signal, and Timer events."""
|
||||
|
||||
def create_task(self):
|
||||
event_definition = self.get_event_definition([MESSAGE_EVENT_XPATH, SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH])
|
||||
task = self._create_task(event_definition)
|
||||
self.spec.start.connect(task)
|
||||
if isinstance(event_definition, CycleTimerEventDefinition):
|
||||
# We are misusing cycle timers, so this is a hack whereby we will
|
||||
# revisit ourselves if we fire.
|
||||
task.connect(task)
|
||||
return task
|
||||
|
||||
def handles_multiple_outgoing(self):
|
||||
return True
|
||||
|
||||
|
||||
class EndEventParser(EventDefinitionParser):
|
||||
"""Parses an End Event. Handles Termination, Escalation, Cancel, and Error End Events."""
|
||||
|
||||
def create_task(self):
|
||||
event_definition = self.get_event_definition([MESSAGE_EVENT_XPATH, CANCEL_EVENT_XPATH, ERROR_EVENT_XPATH,
|
||||
ESCALATION_EVENT_XPATH, TERMINATION_EVENT_XPATH])
|
||||
task = self._create_task(event_definition)
|
||||
task.connect_outgoing(self.spec.end, '%s.ToEndJoin' % self.node.get('id'), None, None)
|
||||
return task
|
||||
|
||||
|
||||
class IntermediateCatchEventParser(EventDefinitionParser):
|
||||
"""Parses an Intermediate Catch Event. Currently supports Message, Signal, and Timer definitions."""
|
||||
|
||||
def create_task(self):
|
||||
event_definition = self.get_event_definition([MESSAGE_EVENT_XPATH, SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH])
|
||||
return super()._create_task(event_definition)
|
||||
|
||||
|
||||
class IntermediateThrowEventParser(EventDefinitionParser):
|
||||
"""Parses an Intermediate Catch Event. Currently supports Message, Signal and Timer event definitions."""
|
||||
|
||||
def create_task(self):
|
||||
event_definition = self.get_event_definition([ESCALATION_EVENT_XPATH, MESSAGE_EVENT_XPATH,
|
||||
SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH])
|
||||
return self._create_task(event_definition)
|
||||
|
||||
|
||||
class SendTaskParser(IntermediateThrowEventParser):
|
||||
|
||||
def create_task(self):
|
||||
|
||||
if self.node.get('messageRef') is not None:
|
||||
event_definition = self.parse_message_event(self.node)
|
||||
else:
|
||||
message_event = first(self.xpath(MESSAGE_EVENT_XPATH))
|
||||
if message_event is not None:
|
||||
event_definition = self.parse_message_event(message_event)
|
||||
else:
|
||||
event_definition = NoneEventDefinition()
|
||||
|
||||
return self._create_task(event_definition)
|
||||
|
||||
|
||||
class ReceiveTaskParser(SendTaskParser):
|
||||
"""Identical to the SendTaskParser - check for a message event definition"""
|
||||
pass
|
||||
|
||||
|
||||
class BoundaryEventParser(EventDefinitionParser):
|
||||
"""
|
||||
Parse a Catching Boundary Event. This extends the
|
||||
IntermediateCatchEventParser in order to parse the event definition.
|
||||
"""
|
||||
|
||||
def create_task(self):
|
||||
cancel_activity = self.node.get('cancelActivity', default='true').lower() == 'true'
|
||||
event_definition = self.get_event_definition([CANCEL_EVENT_XPATH, ERROR_EVENT_XPATH, ESCALATION_EVENT_XPATH,
|
||||
MESSAGE_EVENT_XPATH, SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH])
|
||||
if isinstance(event_definition, NoneEventDefinition):
|
||||
raise NotImplementedError('Unsupported Catch Event: %r' % etree.tostring(self.node))
|
||||
return self._create_task(event_definition, cancel_activity)
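

# Illustrative subclass sketch (hypothetical, not part of the upstream file):
# a catch-event parser restricted to timer definitions, built from the xpath
# constants defined at the top of this module.
class ExampleTimerOnlyCatchEventParser(IntermediateCatchEventParser):  # pragma: no cover
    def create_task(self):
        event_definition = self.get_event_definition([TIMER_EVENT_XPATH])
        return self._create_task(event_definition)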
|
|
@ -0,0 +1,67 @@
|
|||
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
|
||||
from .util import xpath_eval, first
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
class NodeParser:
|
||||
|
||||
def __init__(self, node, filename, doc_xpath, lane=None):
|
||||
|
||||
self.node = node
|
||||
self.filename = filename
|
||||
self.doc_xpath = doc_xpath
|
||||
self.xpath = xpath_eval(node)
|
||||
self.lane = self._get_lane() or lane
|
||||
self.position = self._get_position() or {'x': 0.0, 'y': 0.0}
|
||||
|
||||
def get_id(self):
|
||||
return self.node.get('id')
|
||||
|
||||
def parse_condition(self, sequence_flow):
|
||||
xpath = xpath_eval(sequence_flow)
|
||||
expression = first(xpath('.//bpmn:conditionExpression'))
|
||||
return expression.text if expression is not None else None
|
||||
|
||||
def parse_documentation(self, sequence_flow=None):
|
||||
xpath = xpath_eval(sequence_flow) if sequence_flow is not None else self.xpath
|
||||
documentation_node = first(xpath('.//bpmn:documentation'))
|
||||
return None if documentation_node is None else documentation_node.text
|
||||
|
||||
def parse_incoming_data_references(self):
|
||||
specs = []
|
||||
for name in self.xpath('.//bpmn:dataInputAssociation/bpmn:sourceRef'):
|
||||
ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
|
||||
if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
|
||||
specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
|
||||
else:
|
||||
raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.filename)
|
||||
return specs
|
||||
|
||||
def parse_outgoing_data_references(self):
|
||||
specs = []
|
||||
for name in self.xpath('.//bpmn:dataOutputAssociation/bpmn:targetRef'):
|
||||
ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
|
||||
if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
|
||||
specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
|
||||
else:
|
||||
raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.filename)
|
||||
return specs
|
||||
|
||||
def parse_extensions(self, node=None):
|
||||
extensions = {}
|
||||
extra_ns = {'camunda': CAMUNDA_MODEL_NS}
|
||||
xpath = xpath_eval(self.node, extra_ns) if node is None else xpath_eval(node, extra_ns)
|
||||
extension_nodes = xpath( './/bpmn:extensionElements/camunda:properties/camunda:property')
|
||||
for node in extension_nodes:
|
||||
extensions[node.get('name')] = node.get('value')
|
||||
return extensions
|
||||
|
||||
def _get_lane(self):
|
||||
noderef = first(self.doc_xpath(f".//bpmn:flowNodeRef[text()='{self.get_id()}']"))
|
||||
if noderef is not None:
|
||||
return noderef.getparent().get('name')
|
||||
|
||||
def _get_position(self):
|
||||
bounds = first(self.doc_xpath(f".//bpmndi:BPMNShape[@bpmnElement='{self.get_id()}']//dc:Bounds"))
|
||||
if bounds is not None:
|
||||
return {'x': float(bounds.get('x', 0)), 'y': float(bounds.get('y', 0))}
|
|
@ -0,0 +1,219 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from .ValidationException import ValidationException
|
||||
from .TaskParser import TaskParser
|
||||
from .util import one, DEFAULT_NSMAP
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
|
||||
class UserTaskParser(TaskParser):
|
||||
|
||||
"""
|
||||
Base class for parsing User Tasks
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class ManualTaskParser(UserTaskParser):
|
||||
|
||||
"""
|
||||
Base class for parsing Manual Tasks. Currently assumes that Manual Tasks
|
||||
should be treated the same way as User Tasks.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class NoneTaskParser(UserTaskParser):
|
||||
|
||||
"""
|
||||
Base class for parsing unspecified Tasks. Currently assumes that such Tasks
|
||||
should be treated the same way as User Tasks.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class ExclusiveGatewayParser(TaskParser):
|
||||
"""
|
||||
Parses an Exclusive Gateway, setting up the outgoing conditions
|
||||
appropriately.
|
||||
"""
|
||||
|
||||
def connect_outgoing(self, outgoing_task, outgoing_task_node,
|
||||
sequence_flow_node, is_default):
|
||||
if is_default:
|
||||
super(ExclusiveGatewayParser, self).connect_outgoing(
|
||||
outgoing_task, outgoing_task_node, sequence_flow_node,
|
||||
is_default)
|
||||
else:
|
||||
cond = self.parse_condition(sequence_flow_node)
|
||||
if cond is None:
|
||||
raise ValidationException(
|
||||
'Non-default exclusive outgoing sequence flow '
|
||||
' without condition',
|
||||
sequence_flow_node,
|
||||
self.filename)
|
||||
self.task.connect_outgoing_if(
|
||||
cond, outgoing_task,
|
||||
sequence_flow_node.get('id'),
|
||||
sequence_flow_node.get('name', None),
|
||||
self.parse_documentation(sequence_flow_node))
|
||||
|
||||
def handles_multiple_outgoing(self):
|
||||
return True
|
||||
|
||||
|
||||
class ParallelGatewayParser(TaskParser):
|
||||
|
||||
"""
|
||||
Parses a Parallel Gateway.
|
||||
"""
|
||||
|
||||
def handles_multiple_outgoing(self):
|
||||
return True
|
||||
|
||||
|
||||
class InclusiveGatewayParser(TaskParser):
|
||||
|
||||
"""
|
||||
Parses an Inclusive Gateway.
|
||||
"""
|
||||
|
||||
def handles_multiple_outgoing(self):
|
||||
"""
|
||||
At the moment I haven't implemented support for diverging inclusive
|
||||
gateways
|
||||
"""
|
||||
return False
|
||||
|
||||
|
||||
class SubprocessParser:
|
||||
|
||||
# Not really a fan of this, but I need a way of calling these methods from a task
|
||||
# parser that extends the base parser to override extension parsing. I can't inherit
|
||||
# from my extended task parser AND the original subworkflow parsers because they
|
||||
# both inherit from the same base.
|
||||
|
||||
@staticmethod
|
||||
def get_subprocess_spec(task_parser):
|
||||
|
||||
workflow_start_event = task_parser.xpath('./bpmn:startEvent')
|
||||
workflow_end_event = task_parser.xpath('./bpmn:endEvent')
|
||||
if len(workflow_start_event) != 1:
|
||||
raise ValidationException(
|
||||
'Multiple Start points are not allowed in SubWorkflow Task',
|
||||
node=task_parser.node,
|
||||
filename=task_parser.filename)
|
||||
if len(workflow_end_event) == 0:
|
||||
raise ValidationException(
|
||||
'A SubWorkflow Must contain an End event',
|
||||
node=task_parser.node,
|
||||
filename=task_parser.filename)
|
||||
|
||||
nsmap = DEFAULT_NSMAP.copy()
|
||||
nsmap['camunda'] = "http://camunda.org/schema/1.0/bpmn"
|
||||
nsmap['di'] = "http://www.omg.org/spec/DD/20100524/DI"
|
||||
|
||||
# Create wrapper xml for the subworkflow
|
||||
for ns, val in nsmap.items():
|
||||
etree.register_namespace(ns, val)
|
||||
|
||||
task_parser.process_parser.parser.create_parser(
|
||||
task_parser.node,
|
||||
doc_xpath=task_parser.doc_xpath,
|
||||
filename=task_parser.filename,
|
||||
lane=task_parser.lane
|
||||
)
|
||||
return task_parser.node.get('id')
|
||||
|
||||
@staticmethod
|
||||
def get_call_activity_spec(task_parser):
|
||||
|
||||
called_element = task_parser.node.get('calledElement', None)
|
||||
if not called_element:
|
||||
raise ValidationException(
|
||||
'No "calledElement" attribute for Call Activity.',
|
||||
node=task_parser.node,
|
||||
filename=task_parser.filename)
|
||||
parser = task_parser.process_parser.parser.get_process_parser(called_element)
|
||||
if parser is None:
|
||||
raise ValidationException(
|
||||
f"The process '{called_element}' was not found. Did you mean one of the following: "
|
||||
f"{', '.join(task_parser.process_parser.parser.get_process_ids())}?",
|
||||
node=task_parser.node,
|
||||
filename=task_parser.filename)
|
||||
return called_element
|
||||
|
||||
|
||||
class SubWorkflowParser(TaskParser):
|
||||
|
||||
def create_task(self):
|
||||
subworkflow_spec = SubprocessParser.get_subprocess_spec(self)
|
||||
return self.spec_class(
|
||||
self.spec, self.get_task_spec_name(), subworkflow_spec,
|
||||
lane=self.lane, position=self.position,
|
||||
description=self.node.get('name', None))
|
||||
|
||||
|
||||
class CallActivityParser(TaskParser):
|
||||
"""Parses a CallActivity node."""
|
||||
|
||||
def create_task(self):
|
||||
subworkflow_spec = SubprocessParser.get_call_activity_spec(self)
|
||||
return self.spec_class(
|
||||
self.spec, self.get_task_spec_name(), subworkflow_spec,
|
||||
lane=self.lane, position=self.position,
|
||||
description=self.node.get('name', None))
|
||||
|
||||
|
||||
class ScriptTaskParser(TaskParser):
|
||||
"""
|
||||
Parses a script task
|
||||
"""
|
||||
|
||||
def create_task(self):
|
||||
script = self.get_script()
|
||||
return self.spec_class(self.spec, self.get_task_spec_name(), script,
|
||||
lane=self.lane,
|
||||
position=self.position,
|
||||
description=self.node.get('name', None))
|
||||
|
||||
def get_script(self):
|
||||
"""
|
||||
Gets the script content from the node. A subclass can override this
|
||||
method, if the script needs to be pre-parsed. The result of this call
|
||||
will be passed to the Script Engine for execution.
|
||||
"""
|
||||
try:
|
||||
return one(self.xpath('.//bpmn:script')).text
|
||||
except AssertionError as ae:
|
||||
raise ValidationException(
|
||||
f"Invalid Script Task. No Script Provided. " + str(ae),
|
||||
node=self.node, filename=self.filename)
|
||||
|
||||
|
||||
class ServiceTaskParser(TaskParser):
|
||||
|
||||
"""
|
||||
Parses a ServiceTask node.
|
||||
"""
|
||||
pass
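

# Illustrative subclass sketch (hypothetical, not part of the upstream file):
# get_script() can be overridden when the script body needs pre-processing
# before it is handed to the script engine, as the docstring above suggests.
class ExampleScriptTaskParser(ScriptTaskParser):  # pragma: no cover
    def get_script(self):
        # Strip surrounding whitespace before the engine evaluates the script.
        return super().get_script().strip()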
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
|
||||
BPMN_MODEL_NS = 'http://www.omg.org/spec/BPMN/20100524/MODEL'
|
||||
DIAG_INTERCHANGE_NS = "http://www.omg.org/spec/BPMN/20100524/DI"
|
||||
DIAG_COMMON_NS = "http://www.omg.org/spec/DD/20100524/DC"
|
||||
|
||||
DEFAULT_NSMAP = {
|
||||
'bpmn': BPMN_MODEL_NS,
|
||||
'dc': DIAG_COMMON_NS,
|
||||
'bpmndi': DIAG_INTERCHANGE_NS,
|
||||
}
|
||||
|
||||
def one(nodes, or_none=False):
|
||||
"""
|
||||
Assert that there is exactly one node in the given list, and return it.
|
||||
"""
|
||||
if not nodes and or_none:
|
||||
return None
|
||||
assert len(
|
||||
nodes) == 1, 'Expected 1 result. Received %d results.' % (len(nodes))
|
||||
return nodes[0]
|
||||
|
||||
|
||||
def first(nodes):
|
||||
"""
|
||||
Return the first node in the given list, or None, if the list is empty.
|
||||
"""
|
||||
if len(nodes) >= 1:
|
||||
return nodes[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def xpath_eval(node, extra_ns=None):
|
||||
"""
|
||||
Returns an XPathEvaluator, with namespace prefixes 'bpmn' for
|
||||
http://www.omg.org/spec/BPMN/20100524/MODEL, and additional specified ones
|
||||
"""
|
||||
namespaces = DEFAULT_NSMAP.copy()
|
||||
if extra_ns:
|
||||
namespaces.update(extra_ns)
|
||||
return lambda path: node.xpath(path, namespaces=namespaces)
|
||||
|
||||
|
||||
def full_tag(tag):
|
||||
"""
|
||||
Return the full tag name including namespace for the given BPMN tag. In
|
||||
other words, the name with namespace
|
||||
http://www.omg.org/spec/BPMN/20100524/MODEL
|
||||
"""
|
||||
return '{%s}%s' % (BPMN_MODEL_NS, tag)
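

# Illustrative sketch (not part of the upstream file): evaluating BPMN xpaths
# against a parsed document root; 'diagram.bpmn' is a hypothetical file name.
def _example_xpath_usage():  # pragma: no cover
    from lxml import etree
    root = etree.parse('diagram.bpmn')
    xpath = xpath_eval(root)
    start_events = xpath('.//bpmn:startEvent')
    return full_tag('startEvent'), first(start_events)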
|
|
@ -0,0 +1,178 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2020 Matthew Hampton, Dan Funk
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import configparser
|
||||
from io import BytesIO, TextIOWrapper
|
||||
from warnings import warn
|
||||
|
||||
from lxml import etree
|
||||
import zipfile
|
||||
import os
|
||||
|
||||
from SpiffWorkflow.task import TaskState
|
||||
from ...bpmn.specs.SubWorkflowTask import SubWorkflowTask
|
||||
from ...bpmn.workflow import BpmnWorkflow
|
||||
from ...bpmn.serializer.json import BPMNJSONSerializer
|
||||
from ..parser.BpmnParser import BpmnParser
|
||||
from .Packager import Packager
|
||||
|
||||
class BpmnSerializer(BPMNJSONSerializer):
|
||||
"""
|
||||
DEPRECATED --- This class remains available only so that folks can deserialize
|
||||
existing workflows.
|
||||
The BpmnSerializer class provides support for deserializing a Bpmn Workflow
|
||||
Spec from a BPMN package. The BPMN package must have been created using the
|
||||
:class:`SpiffWorkflow.bpmn.serializer.Packager`.
|
||||
|
||||
It will also use the appropriate subclass of BpmnParser, if one is included
|
||||
in the metadata.ini file.
|
||||
"""
|
||||
|
||||
def __init_subclass__(cls, **kwargs):
|
||||
"""This throws a deprecation warning on subclassing."""
|
||||
warn(f'{cls.__name__} is deprecated. Please use '
|
||||
f'bpmn.serializer.workflow.BpmnWorkflowSerializer',
|
||||
DeprecationWarning, stacklevel=2)
|
||||
super().__init_subclass__(**kwargs)
|
||||
|
||||
def serialize_workflow(self, workflow, **kwargs):
|
||||
"""
|
||||
*** DEPRECATED *** DEPRECATED *** DEPRECATED *** DEPRECATED ***
|
||||
Serializes the workflow data and task tree. Will also serialize
|
||||
the Spec if 'include_spec' kwarg is not set to false.
|
||||
Please use bpmn.serializer.workflow.BpmnWorkflowSerializer for
|
||||
Serialization. This class remains available only to help transition
|
||||
to the new Serialization scheme.
|
||||
"""
|
||||
"""This throws a deprecation warning on initialization."""
|
||||
warn(f'{self.__class__.__name__} is deprecated. DO NOT continue to '
|
||||
f'use it for serialization. Deserialize your old workflows, then '
|
||||
f'move to the new serializer for storing. See '
|
||||
f'bpmn.serializer.workflow.BpmnWorkflowSerializer',
|
||||
DeprecationWarning, stacklevel=2)
|
||||
assert isinstance(workflow, BpmnWorkflow)
|
||||
include_spec = kwargs.get('include_spec',True)
|
||||
return super().serialize_workflow(workflow, include_spec=include_spec)
|
||||
|
||||
def serialize_task(self, task, skip_children=False, **kwargs):
|
||||
return super().serialize_task(task,
|
||||
skip_children=skip_children,
|
||||
allow_subs=True)
|
||||
|
||||
def deserialize_workflow(self, s_state, workflow_spec=None,
|
||||
read_only=False, **kwargs):
|
||||
|
||||
return super().deserialize_workflow(s_state,
|
||||
wf_class=BpmnWorkflow,
|
||||
wf_spec=workflow_spec,
|
||||
read_only=read_only,
|
||||
**kwargs)
|
||||
|
||||
def _deserialize_task_children(self, task, s_state):
|
||||
"""Reverses the internal process that will merge children from a
|
||||
sub-workflow in the top level workflow. This copies the states
|
||||
back into the sub-workflow after generating it from the base spec"""
|
||||
if not isinstance(task.task_spec, SubWorkflowTask):
|
||||
return super()._deserialize_task_children(task, s_state)
|
||||
|
||||
sub_workflow = task.task_spec.create_sub_workflow(task)
|
||||
children = []
|
||||
for c in s_state['children']:
|
||||
# One child belongs to the parent workflow (The path back
|
||||
# out of the subworkflow) the other children belong to the
|
||||
# sub-workflow.
|
||||
|
||||
# We need to determine if we are still in the same workflow,
|
||||
# Ideally we can just check: if c['workflow_name'] == sub_workflow.name
|
||||
# however, we need to support deserialization of workflows without this
|
||||
# critical property, at least temporarily, so people can migrate.
|
||||
if 'workflow_name' in c:
|
||||
same_workflow = c['workflow_name'] == sub_workflow.name
|
||||
else:
|
||||
same_workflow = sub_workflow.get_tasks_from_spec_name(c['task_spec'])
|
||||
|
||||
if same_workflow:
|
||||
start_task = self.deserialize_task(sub_workflow, c)
|
||||
children.append(start_task)
|
||||
start_task.parent = task.id
|
||||
sub_workflow.task_tree = start_task
|
||||
# get a list of tasks in reverse order of change
|
||||
# our last task should be on the top.
|
||||
tasks = sub_workflow.get_tasks(TaskState.COMPLETED)
|
||||
tasks.sort(key=lambda x: x.last_state_change,reverse=True)
|
||||
if len(tasks)>0:
|
||||
last_task = tasks[0]
|
||||
sub_workflow.last_task = last_task
|
||||
else:
|
||||
resume_task = self.deserialize_task(task.workflow, c)
|
||||
resume_task.parent = task.id
|
||||
children.append(resume_task)
|
||||
return children
|
||||
|
||||
def deserialize_task(self, workflow, s_state):
|
||||
assert isinstance(workflow, BpmnWorkflow)
|
||||
return super().deserialize_task(workflow, s_state)
|
||||
|
||||
def deserialize_workflow_spec(self, s_state, filename=None):
|
||||
"""
|
||||
:param s_state: a byte-string with the contents of the packaged
|
||||
workflow archive, or a file-like object.
|
||||
|
||||
:param filename: the name of the package file.
|
||||
"""
|
||||
if isinstance(s_state,dict):
|
||||
return super().deserialize_workflow_spec(s_state)
|
||||
if isinstance(s_state,str):
|
||||
return super().deserialize_workflow_spec(s_state)
|
||||
if isinstance(s_state, bytes):
|
||||
s_state = BytesIO(s_state)
|
||||
|
||||
package_zip = zipfile.ZipFile(
|
||||
s_state, "r", compression=zipfile.ZIP_DEFLATED)
|
||||
config = configparser.ConfigParser()
|
||||
ini_fp = TextIOWrapper(
|
||||
package_zip.open(Packager.METADATA_FILE), encoding="UTF-8")
|
||||
try:
|
||||
config.read_file(ini_fp)
|
||||
finally:
|
||||
ini_fp.close()
|
||||
|
||||
parser_class = BpmnParser
|
||||
parser_class_module = config.get(
|
||||
'MetaData', 'parser_class_module', fallback=None)
|
||||
|
||||
if parser_class_module:
|
||||
mod = __import__(parser_class_module, fromlist=[
|
||||
config.get('MetaData', 'parser_class')])
|
||||
parser_class = getattr(mod, config.get('MetaData', 'parser_class'))
|
||||
|
||||
parser = parser_class()
|
||||
|
||||
for info in package_zip.infolist():
|
||||
parts = os.path.split(info.filename)
|
||||
if (len(parts) == 2 and not parts[0] and parts[1].lower().endswith('.bpmn')):
|
||||
# It is in the root of the ZIP and is a BPMN file
|
||||
bpmn_fp = package_zip.open(info)
|
||||
try:
|
||||
bpmn = etree.parse(bpmn_fp)
|
||||
finally:
|
||||
bpmn_fp.close()
|
||||
|
||||
parser.add_bpmn_xml(bpmn, filename='%s:%s' % (filename, info.filename))
|
||||
spec_name = config.get('MetaData', 'entry_point_process')
|
||||
return parser.get_spec(spec_name)
|
|
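For orientation, a minimal sketch of how the deprecated serializer above can load a spec from a packaged archive. The class name BpmnSerializer and its import path are assumptions for this example; only the deserialize_workflow_spec signature shown above is relied on.

    # Illustrative sketch only; BpmnSerializer and the import path are assumed.
    from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer

    with open('my_process.zip', 'rb') as f:
        # deserialize_workflow_spec accepts a dict, a str, bytes, or a
        # file-like object containing the packaged archive.
        spec = BpmnSerializer().deserialize_workflow_spec(f.read(), filename='my_process.zip')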
@ -0,0 +1,483 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from builtins import str
|
||||
from builtins import hex
|
||||
from builtins import range
|
||||
from builtins import object
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from collections import deque
|
||||
import json
|
||||
from ...task import TaskState
|
||||
from ...specs import SubWorkflow
|
||||
from ...serializer.base import Serializer
|
||||
from ..workflow import BpmnWorkflow
|
||||
|
||||
|
||||
class UnrecoverableWorkflowChange(Exception):
|
||||
"""
|
||||
This is thrown if the workflow cannot be restored because the workflow spec
|
||||
has changed, and the identified transitions no longer exist.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class _RouteNode(object):
|
||||
"""
|
||||
Private helper class
|
||||
"""
|
||||
|
||||
def __init__(self, task_spec, outgoing_route_node=None):
|
||||
self.task_spec = task_spec
|
||||
self.outgoing = [outgoing_route_node] if outgoing_route_node else []
|
||||
self.state = None
|
||||
|
||||
def get_outgoing_by_spec(self, task_spec):
|
||||
m = [r for r in self.outgoing if r.task_spec == task_spec]
|
||||
return m[0] if m else None
|
||||
|
||||
def to_list(self):
|
||||
result = []
|
||||
n = self
|
||||
while n.outgoing:
|
||||
assert len(
|
||||
n.outgoing) == 1, "to_list(..) cannot be called after a merge"
|
||||
result.append(n.task_spec)
|
||||
n = n.outgoing[0]
|
||||
result.append(n.task_spec)
|
||||
return result
|
||||
|
||||
def contains(self, other_route):
|
||||
if isinstance(other_route, list):
|
||||
return self.to_list()[0:len(other_route)] == other_route
|
||||
|
||||
# This only works before merging
|
||||
assert len(other_route.outgoing) <= 1,\
|
||||
"contains(..) cannot be called after a merge"
|
||||
assert len(self.outgoing) <= 1,\
|
||||
"contains(..) cannot be called after a merge"
|
||||
|
||||
if other_route.task_spec == self.task_spec:
|
||||
if other_route.outgoing and self.outgoing:
|
||||
return self.outgoing[0].contains(other_route.outgoing[0])
|
||||
elif self.outgoing:
|
||||
return True
|
||||
elif not other_route.outgoing:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class _BpmnProcessSpecState(object):
|
||||
|
||||
"""
|
||||
Private helper class
|
||||
"""
|
||||
|
||||
def __init__(self, spec):
|
||||
self.spec = spec
|
||||
self.route = None
|
||||
|
||||
def get_path_to_transition(self, transition, state, workflow_parents,
|
||||
taken_routes=None):
|
||||
# find a route passing through each task:
|
||||
route = [self.spec.start]
|
||||
route_to_parent_complete = None
|
||||
for task_name in workflow_parents:
|
||||
route = self._breadth_first_task_search(str(task_name), route)
|
||||
if route is None:
|
||||
raise UnrecoverableWorkflowChange(
|
||||
'No path found for route \'%s\'' % transition)
|
||||
route_to_parent_complete = route + [route[-1].outputs[0]]
|
||||
route = route + [route[-1].spec.start]
|
||||
route = self._breadth_first_transition_search(
|
||||
transition, route, taken_routes=taken_routes)
|
||||
if route is None:
|
||||
raise UnrecoverableWorkflowChange(
|
||||
'No path found for route \'%s\'' % transition)
|
||||
outgoing_route_node = None
|
||||
for spec in reversed(route):
|
||||
outgoing_route_node = _RouteNode(spec, outgoing_route_node)
|
||||
outgoing_route_node.state = state
|
||||
return outgoing_route_node, route_to_parent_complete
|
||||
|
||||
def add_route(self, outgoing_route_node):
|
||||
if self.route:
|
||||
self._merge_routes(self.route, outgoing_route_node)
|
||||
else:
|
||||
self.route = outgoing_route_node
|
||||
|
||||
def dump(self):
|
||||
print(self.get_dump())
|
||||
|
||||
def get_dump(self):
|
||||
def recursive_dump(route_node, indent, verbose=False):
|
||||
|
||||
task_spec = route_node.task_spec
|
||||
dump = '%s (%s:%s)' % (
|
||||
task_spec.name,
|
||||
task_spec.__class__.__name__,
|
||||
hex(id(task_spec))) + '\n'
|
||||
if verbose:
|
||||
if task_spec.inputs:
|
||||
dump += indent + '- IN: ' + \
|
||||
','.join(['%s (%s)' % (t.name, hex(id(t)))
|
||||
for t in task_spec.inputs]) + '\n'
|
||||
if task_spec.outputs:
|
||||
dump += indent + '- OUT: ' + \
|
||||
','.join(['%s (%s)' % (t.name, hex(id(t)))
|
||||
for t in task_spec.outputs]) + '\n'
|
||||
|
||||
for i, t in enumerate(route_node.outgoing):
|
||||
dump += indent + ' --> ' + \
|
||||
recursive_dump(
|
||||
t, indent + (
|
||||
' | ' if i + 1 < len(route_node.outgoing)
|
||||
else ' '))
|
||||
return dump
|
||||
|
||||
dump = recursive_dump(self.route, '')
|
||||
return dump
|
||||
|
||||
def go(self, workflow):
|
||||
leaf_tasks = []
|
||||
self._go(workflow.task_tree.children[0], self.route, leaf_tasks)
|
||||
for task in sorted(
|
||||
leaf_tasks,
|
||||
key=lambda t: 0 if getattr(
|
||||
t, '_bpmn_load_target_state', TaskState.READY) == TaskState.READY
|
||||
else 1):
|
||||
task.task_spec._update(task)
|
||||
task._inherit_data()
|
||||
if hasattr(task, '_bpmn_load_target_state'):
|
||||
delattr(task, '_bpmn_load_target_state')
|
||||
|
||||
def _go(self, task, route_node, leaf_tasks):
|
||||
assert task.task_spec == route_node.task_spec
|
||||
if not route_node.outgoing:
|
||||
assert route_node.state is not None
|
||||
setattr(task, '_bpmn_load_target_state', route_node.state)
|
||||
leaf_tasks.append(task)
|
||||
else:
|
||||
if not task._is_finished():
|
||||
if (issubclass(task.task_spec.__class__, SubWorkflow) and
|
||||
task.task_spec.spec.start in
|
||||
[o.task_spec for o in route_node.outgoing]):
|
||||
self._go_in_to_subworkflow(
|
||||
task, [n.task_spec for n in route_node.outgoing])
|
||||
else:
|
||||
self._complete_task_silent(
|
||||
task, [n.task_spec for n in route_node.outgoing])
|
||||
for n in route_node.outgoing:
|
||||
matching_child = [
|
||||
t for t in task.children if t.task_spec == n.task_spec]
|
||||
assert len(matching_child) == 1
|
||||
self._go(matching_child[0], n, leaf_tasks)
|
||||
|
||||
def _complete_task_silent(self, task, target_children_specs):
|
||||
# This method simulates the completing of a task, but without hooks
|
||||
# being called, and targeting a specific subset of the children
|
||||
if task._is_finished():
|
||||
return
|
||||
task._set_state(TaskState.COMPLETED)
|
||||
|
||||
task.children = []
|
||||
for task_spec in target_children_specs:
|
||||
task._add_child(task_spec)
|
||||
|
||||
def _go_in_to_subworkflow(self, my_task, target_children_specs):
|
||||
# This method simulates the entering of a subworkflow, but without
|
||||
# hooks being called, and targeting a specific subset of the entry
|
||||
# tasks in the subworkflow. It creates the new workflow instance and
|
||||
# merges it in to the tree This is based on
|
||||
# SubWorkflow._on_ready_before_hook(..)
|
||||
if my_task._is_finished():
|
||||
return
|
||||
|
||||
subworkflow = my_task.task_spec._create_subworkflow(my_task)
|
||||
subworkflow.completed_event.connect(
|
||||
my_task.task_spec._on_subworkflow_completed, my_task)
|
||||
|
||||
# Create the children (these are the tasks that follow the subworkflow,
|
||||
# on completion:
|
||||
my_task.children = []
|
||||
my_task._sync_children(my_task.task_spec.outputs, TaskState.FUTURE)
|
||||
for t in my_task.children:
|
||||
t.task_spec._predict(t)
|
||||
|
||||
# Integrate the tree of the subworkflow into the tree of this workflow.
|
||||
for child in subworkflow.task_tree.children:
|
||||
if child.task_spec in target_children_specs:
|
||||
my_task.children.insert(0, child)
|
||||
child.parent = my_task
|
||||
|
||||
my_task._set_internal_data(subworkflow=subworkflow)
|
||||
|
||||
my_task._set_state(TaskState.COMPLETED)
|
||||
|
||||
def _merge_routes(self, target, src):
|
||||
assert target.task_spec == src.task_spec
|
||||
for out_route in src.outgoing:
|
||||
target_out_route = target.get_outgoing_by_spec(out_route.task_spec)
|
||||
if target_out_route:
|
||||
self._merge_routes(target_out_route, out_route)
|
||||
else:
|
||||
target.outgoing.append(out_route)
|
||||
|
||||
def _breadth_first_transition_search(self, transition_id, starting_route,
|
||||
taken_routes=None):
|
||||
return self._breadth_first_search(starting_route,
|
||||
transition_id=transition_id,
|
||||
taken_routes=taken_routes)
|
||||
|
||||
def _breadth_first_task_search(self, task_name, starting_route):
|
||||
return self._breadth_first_search(starting_route, task_name=task_name)
|
||||
|
||||
def _breadth_first_search(self, starting_route, task_name=None,
|
||||
transition_id=None, taken_routes=None):
|
||||
q = deque()
|
||||
done = set()
|
||||
q.append(starting_route)
|
||||
while q:
|
||||
route = q.popleft()
|
||||
if not route[-1] == starting_route[-1]:
|
||||
if task_name and route[-1].name == task_name:
|
||||
return route
|
||||
if (transition_id and
|
||||
hasattr(route[-1], 'has_outgoing_sequence_flow') and
|
||||
route[-1].has_outgoing_sequence_flow(transition_id)):
|
||||
spec = route[-1].get_outgoing_sequence_flow_by_id(
|
||||
transition_id).target_task_spec
|
||||
if taken_routes:
|
||||
final_route = route + [spec]
|
||||
for taken in taken_routes:
|
||||
t = taken.to_list() if not isinstance(
|
||||
taken, list) else taken
|
||||
if final_route[0:len(t)] == t:
|
||||
spec = None
|
||||
break
|
||||
if spec:
|
||||
route.append(spec)
|
||||
return route
|
||||
for child in route[-1].outputs:
|
||||
new_route = route + [child]
|
||||
if len(new_route) > 10000:
|
||||
raise ValueError("Maximum looping limit exceeded "
|
||||
"searching for path to % s" %
|
||||
(task_name or transition_id))
|
||||
new_route_r = tuple(new_route)
|
||||
if new_route_r not in done:
|
||||
done.add(new_route_r)
|
||||
q.append(new_route)
|
||||
return None
|
||||
|
||||
|
||||
class CompactWorkflowSerializer(Serializer):
|
||||
"""
|
||||
This class provides an implementation of serialize_workflow and
|
||||
deserialize_workflow that produces a compact representation of the workflow
|
||||
state that can be stored in a database column of reasonably small size.
|
||||
|
||||
It records ONLY enough information to identify the transition leading into
|
||||
each WAITING or READY state, along with the state of that task. This is
|
||||
generally enough to resurrect a running BPMN workflow instance, with some
|
||||
limitations.
|
||||
|
||||
Limitations:
|
||||
1. The compact representation does not include any workflow or task data.
|
||||
It is the responsibility of the calling application to record whatever
|
||||
data is relevant to it, and set it on the restored workflow.
|
||||
2. The restoring process will not produce exactly the same workflow tree -
|
||||
it finds the SHORTEST route to the saved READY and WAITING tasks, not
|
||||
the route that was actually taken. This means that the tree cannot be
|
||||
interrogated for historical information about the workflow. However, the
|
||||
workflow does follow the same logic paths as would have been followed by
|
||||
the original workflow.
|
||||
"""
|
||||
|
||||
STATE_SPEC_VERSION = 1
|
||||
|
||||
def serialize_workflow_spec(self, wf_spec, **kwargs):
|
||||
raise NotImplementedError(
|
||||
"The CompactWorkflowSerializer only supports "
|
||||
" workflow serialization.")
|
||||
|
||||
def deserialize_workflow_spec(self, s_state, **kwargs):
|
||||
raise NotImplementedError(
|
||||
"The CompactWorkflowSerializer only supports "
|
||||
"workflow serialization.")
|
||||
|
||||
def serialize_workflow(self, workflow, include_spec=False, **kwargs):
|
||||
"""
|
||||
:param workflow: the workflow instance to serialize
|
||||
|
||||
:param include_spec: Always set to False (The CompactWorkflowSerializer
|
||||
only supports workflow serialization)
|
||||
"""
|
||||
if include_spec:
|
||||
raise NotImplementedError(
|
||||
'Including the spec serialization with the workflow state '
|
||||
'is not implemented.')
|
||||
return self._get_workflow_state(workflow)
|
||||
|
||||
def deserialize_workflow(self, s_state, workflow_spec=None,
|
||||
read_only=False, **kwargs):
|
||||
"""
|
||||
:param s_state: the state of the workflow as returned by
|
||||
serialize_workflow
|
||||
|
||||
:param workflow_spec: the Workflow Spec of the workflow
|
||||
(CompactWorkflowSerializer only supports workflow serialization)
|
||||
|
||||
:param read_only: (Optional) True if the workflow should be restored in
|
||||
READ ONLY mode
|
||||
|
||||
NB: Additional kwargs passed to the deserialize_workflow method will be
|
||||
passed to the new_workflow method.
|
||||
"""
|
||||
if workflow_spec is None:
|
||||
raise NotImplementedError(
|
||||
'Including the spec serialization with the workflow state is '
|
||||
'not implemented. A \'workflow_spec\' must '
|
||||
'be provided.')
|
||||
workflow = self.new_workflow(
|
||||
workflow_spec, read_only=read_only, **kwargs)
|
||||
self._restore_workflow_state(workflow, s_state)
|
||||
return workflow
|
||||
|
||||
def new_workflow(self, workflow_spec, read_only=False, **kwargs):
|
||||
"""
|
||||
Create a new workflow instance from the given spec and arguments.
|
||||
|
||||
:param workflow_spec: the workflow spec to use
|
||||
|
||||
:param read_only: True if the workflow should be restored in read-only mode
|
||||
|
||||
:param kwargs: Any extra kwargs passed to the deserialize_workflow
|
||||
method will be passed through here
|
||||
"""
|
||||
return BpmnWorkflow(workflow_spec, read_only=read_only, **kwargs)
|
||||
|
||||
def _get_workflow_state(self, workflow):
|
||||
active_tasks = workflow.get_tasks(state=(TaskState.READY | TaskState.WAITING))
|
||||
states = []
|
||||
|
||||
for task in active_tasks:
|
||||
parent_task_spec = task.parent.task_spec
|
||||
transition = parent_task_spec.get_outgoing_sequence_flow_by_spec(
|
||||
task.task_spec).id
|
||||
w = task.workflow
|
||||
workflow_parents = []
|
||||
while w.outer_workflow and w.outer_workflow != w:
|
||||
workflow_parents.append(w.name)
|
||||
w = w.outer_workflow
|
||||
state = ("W" if task.state == TaskState.WAITING else "R")
|
||||
states.append(
|
||||
[transition, list(reversed(workflow_parents)), state])
|
||||
|
||||
compacted_states = []
|
||||
for state in sorted(states,
|
||||
key=lambda s: ",".join([s[0],
|
||||
s[2],
|
||||
(':'.join(s[1]))])):
|
||||
if state[-1] == 'R':
|
||||
state.pop()
|
||||
if state[-1] == []:
|
||||
state.pop()
|
||||
if len(state) == 1:
|
||||
state = state[0]
|
||||
compacted_states.append(state)
|
||||
|
||||
state_list = compacted_states + [self.STATE_SPEC_VERSION]
|
||||
state_s = json.dumps(state_list)[1:-1]
|
||||
return state_s
|
||||
|
||||
def _restore_workflow_state(self, workflow, state):
|
||||
state_list = json.loads('[' + state + ']')
|
||||
|
||||
self._check_spec_version(state_list[-1])
|
||||
|
||||
s = _BpmnProcessSpecState(workflow.spec)
|
||||
|
||||
routes = []
|
||||
for state in state_list[:-1]:
|
||||
if isinstance(state, str) or type(state).__name__ == 'str':
|
||||
state = [str(state)]
|
||||
transition = state[0]
|
||||
workflow_parents = state[1] if len(state) > 1 else []
|
||||
state = (TaskState.WAITING if len(state) >
|
||||
2 and state[2] == 'W' else TaskState.READY)
|
||||
|
||||
route, route_to_parent_complete = s.get_path_to_transition(
|
||||
transition, state, workflow_parents)
|
||||
routes.append(
|
||||
(route, route_to_parent_complete, transition, state,
|
||||
workflow_parents))
|
||||
|
||||
retry = True
|
||||
retry_count = 0
|
||||
while (retry):
|
||||
if retry_count > 100:
|
||||
raise ValueError(
|
||||
'Maximum retry limit exceeded searching for unique paths')
|
||||
retry = False
|
||||
|
||||
for i in range(len(routes)):
|
||||
(route, route_to_parent_complete, transition, state,
|
||||
workflow_parents) = routes[i]
|
||||
|
||||
for j in range(len(routes)):
|
||||
if i == j:
|
||||
continue
|
||||
other_route = routes[j][0]
|
||||
route_to_parent_complete = routes[j][1]
|
||||
if route.contains(other_route) or (
|
||||
route_to_parent_complete and route.contains(
|
||||
route_to_parent_complete)):
|
||||
taken_routes = [r for r in routes if r[0] != route]
|
||||
taken_routes = [r for r in [r[0] for r
|
||||
in taken_routes] +
|
||||
[r[1] for r in taken_routes] if r]
|
||||
(route,
|
||||
route_to_parent_complete) = s.get_path_to_transition(
|
||||
transition, state, workflow_parents,
|
||||
taken_routes=taken_routes)
|
||||
for r in taken_routes:
|
||||
assert not route.contains(r)
|
||||
routes[
|
||||
i] = (route, route_to_parent_complete, transition,
|
||||
state, workflow_parents)
|
||||
retry = True
|
||||
retry_count += 1
|
||||
break
|
||||
if retry:
|
||||
break
|
||||
|
||||
for r in routes:
|
||||
s.add_route(r[0])
|
||||
|
||||
workflow._busy_with_restore = True
|
||||
try:
|
||||
if len(state_list) <= 1:
|
||||
workflow.cancel(success=True)
|
||||
return
|
||||
s.go(workflow)
|
||||
finally:
|
||||
workflow._busy_with_restore = False
|
||||
|
||||
def _check_spec_version(self, v):
|
||||
# We only have one version right now:
|
||||
assert v == self.STATE_SPEC_VERSION
|
|
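A minimal usage sketch for the compact serializer defined above. It assumes a BpmnProcessSpec has already been parsed into `spec` and that a running BpmnWorkflow is available as `workflow`; per limitation 1 above, task and workflow data must be saved and restored separately by the caller.

    # Illustrative sketch only; `spec` and `workflow` are assumed to exist.
    serializer = CompactWorkflowSerializer()

    # Store only the READY/WAITING transitions and their states.
    state = serializer.serialize_workflow(workflow, include_spec=False)

    # Later: rebuild a workflow from the same spec and restore the saved state.
    restored = serializer.deserialize_workflow(state, workflow_spec=spec)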
@ -0,0 +1,548 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from builtins import object
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import os
|
||||
import configparser
|
||||
import glob
|
||||
import hashlib
|
||||
import inspect
|
||||
import zipfile
|
||||
from io import StringIO
|
||||
from optparse import OptionParser, OptionGroup
|
||||
from ..parser.BpmnParser import BpmnParser
|
||||
from ..parser.ValidationException import ValidationException
|
||||
from ..parser.util import xpath_eval, one
|
||||
from lxml import etree
|
||||
SIGNAVIO_NS = 'http://www.signavio.com'
|
||||
CONFIG_SECTION_NAME = "Packager Options"
|
||||
|
||||
|
||||
def md5hash(data):
|
||||
if not isinstance(data, bytes):
|
||||
data = data.encode('UTF-8')
|
||||
|
||||
return hashlib.md5(data).hexdigest().lower()
|
||||
|
||||
|
||||
class Packager(object):
|
||||
"""
|
||||
The Packager class pre-parses a set of BPMN files (together with their SVG
|
||||
representation), validates the contents and then produces a ZIP-based
|
||||
archive containing the pre-parsed BPMN and SVG files, the source files (for
|
||||
reference) and a metadata.ini file that contains enough information to
|
||||
create a BpmnProcessSpec instance from the archive (e.g. the ID of the
|
||||
entry point process).
|
||||
|
||||
This class can be extended and any public method overridden to do
|
||||
additional validation / parsing or to package additional metadata.
|
||||
|
||||
Extension point:
|
||||
|
||||
PARSER_CLASS: provide the class that should be used to parse the BPMN
|
||||
files. The fully-qualified name will be included in the metadata.ini file,
|
||||
so that the BpmnSerializer can instantiate the right parser to deal with
|
||||
the package.
|
||||
|
||||
Editor hooks: package_for_editor_<editor name>(self, spec, filename):
|
||||
Called once for each BPMN file. Should add any additional files to the
|
||||
archive.
|
||||
"""
|
||||
|
||||
METADATA_FILE = "metadata.ini"
|
||||
MANIFEST_FILE = "manifest.ini"
|
||||
PARSER_CLASS = BpmnParser
|
||||
|
||||
def __init__(self, package_file, entry_point_process, meta_data=None,
|
||||
editor=None):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param package_file: a file-like object where the contents of the
|
||||
package must be written to
|
||||
|
||||
:param entry_point_process: the name or ID of the entry point process
|
||||
|
||||
:param meta_data: A list of meta-data tuples to include in the
|
||||
metadata.ini file (in addition to the standard ones)
|
||||
|
||||
:param editor: The name of the editor used to create the source BPMN /
|
||||
SVG files. This activates additional hook method calls. (optional)
|
||||
"""
|
||||
self.package_file = package_file
|
||||
self.entry_point_process = entry_point_process
|
||||
self.parser = self.PARSER_CLASS()
|
||||
self.meta_data = meta_data or []
|
||||
self.input_files = []
|
||||
self.input_path_prefix = None
|
||||
self.editor = editor
|
||||
self.manifest = {}
|
||||
|
||||
def add_bpmn_file(self, filename):
|
||||
"""
|
||||
Add the given BPMN filename to the packager's set.
|
||||
"""
|
||||
self.add_bpmn_files([filename])
|
||||
|
||||
def add_bpmn_files_by_glob(self, g):
|
||||
"""
|
||||
Add all filenames matching the provided pattern (e.g. *.bpmn) to the
|
||||
packager's set.
|
||||
"""
|
||||
self.add_bpmn_files(glob.glob(g))
|
||||
|
||||
def add_bpmn_files(self, filenames):
|
||||
"""
|
||||
Add all filenames in the given list to the packager's set.
|
||||
"""
|
||||
self.input_files += filenames
|
||||
|
||||
def create_package(self):
|
||||
"""
|
||||
Creates the package, writing the data out to the provided file-like
|
||||
object.
|
||||
"""
|
||||
|
||||
# Check that all files exist (and calculate the longest shared path
|
||||
# prefix):
|
||||
self.input_path_prefix = None
|
||||
for filename in self.input_files:
|
||||
if not os.path.isfile(filename):
|
||||
raise ValueError(
|
||||
'%s does not exist or is not a file' % filename)
|
||||
if self.input_path_prefix:
|
||||
full = os.path.abspath(os.path.dirname(filename))
|
||||
while not (full.startswith(self.input_path_prefix) and
|
||||
self.input_path_prefix):
|
||||
self.input_path_prefix = self.input_path_prefix[:-1]
|
||||
else:
|
||||
self.input_path_prefix = os.path.abspath(
|
||||
os.path.dirname(filename))
|
||||
|
||||
# Parse all of the XML:
|
||||
self.bpmn = {}
|
||||
for filename in self.input_files:
|
||||
bpmn = etree.parse(filename)
|
||||
self.bpmn[os.path.abspath(filename)] = bpmn
|
||||
|
||||
# Now run through pre-parsing and validation:
|
||||
for filename, bpmn in list(self.bpmn.items()):
|
||||
bpmn = self.pre_parse_and_validate(bpmn, filename)
|
||||
self.bpmn[os.path.abspath(filename)] = bpmn
|
||||
|
||||
# Now check that we can parse it fine:
|
||||
for filename, bpmn in list(self.bpmn.items()):
|
||||
self.parser.add_bpmn_xml(bpmn, filename=filename)
|
||||
# at this point, we have an item in self.wf_spec.get_specs_depth_first()
|
||||
# that has a filename of None and a bpmn that needs to be handled in the
|
||||
# for loop over specs below.
|
||||
self.wf_spec = self.parser.get_spec(self.entry_point_process)
|
||||
|
||||
# Now package everything:
|
||||
self.package_zip = zipfile.ZipFile(
|
||||
self.package_file, "w", compression=zipfile.ZIP_DEFLATED)
|
||||
|
||||
done_files = set()
|
||||
|
||||
for spec in self.wf_spec.get_specs_depth_first():
|
||||
filename = spec.file
|
||||
if filename is None:
|
||||
# This is for when we are doing a subworkflow, and it
|
||||
# creates something in the bpmn spec list, but it really has
|
||||
# no file. In this case, it is safe to skip the add to the
|
||||
# zip file.
|
||||
continue
|
||||
if filename not in done_files:
|
||||
done_files.add(filename)
|
||||
|
||||
bpmn = self.bpmn[os.path.abspath(filename)]
|
||||
self.write_to_package_zip(
|
||||
"%s.bpmn" % spec.name, etree.tostring(bpmn.getroot()))
|
||||
|
||||
self.write_to_package_zip(
|
||||
"src/" + self._get_zip_path(filename), filename)
|
||||
|
||||
self._call_editor_hook('package_for_editor', spec, filename)
|
||||
|
||||
self.write_meta_data()
|
||||
self.write_manifest()
|
||||
|
||||
self.package_zip.close()
|
||||
|
||||
def write_file_to_package_zip(self, filename, src_filename):
|
||||
"""
|
||||
Writes a local file in to the zip file and adds it to the manifest
|
||||
dictionary
|
||||
|
||||
:param filename: The zip file name
|
||||
|
||||
:param src_filename: the local file name
|
||||
"""
|
||||
f = open(src_filename)
|
||||
with f:
|
||||
data = f.read()
|
||||
self.manifest[filename] = md5hash(data)
|
||||
self.package_zip.write(src_filename, filename)
|
||||
|
||||
def write_to_package_zip(self, filename, data):
|
||||
"""
|
||||
Writes data to the zip file and adds it to the manifest dictionary
|
||||
|
||||
:param filename: The zip file name
|
||||
|
||||
:param data: the data
|
||||
"""
|
||||
self.manifest[filename] = md5hash(data)
|
||||
self.package_zip.writestr(filename, data)
|
||||
|
||||
def write_manifest(self):
|
||||
"""
|
||||
Write the manifest content to the zip file. It must be written in a
|
||||
predictable order.
|
||||
"""
|
||||
config = configparser.ConfigParser()
|
||||
|
||||
config.add_section('Manifest')
|
||||
|
||||
for f in sorted(self.manifest.keys()):
|
||||
config.set('Manifest', f.replace(
|
||||
'\\', '/').lower(), self.manifest[f])
|
||||
|
||||
ini = StringIO()
|
||||
config.write(ini)
|
||||
self.manifest_data = ini.getvalue()
|
||||
self.package_zip.writestr(self.MANIFEST_FILE, self.manifest_data)
|
||||
|
||||
def pre_parse_and_validate(self, bpmn, filename):
|
||||
"""
|
||||
A subclass can override this method to provide additional parsing or
|
||||
validation. It should call the parent method first.
|
||||
|
||||
:param bpmn: an lxml tree of the bpmn content
|
||||
|
||||
:param filename: the source file name
|
||||
|
||||
This must return the updated bpmn object (or a replacement)
|
||||
"""
|
||||
bpmn = self._call_editor_hook(
|
||||
'pre_parse_and_validate', bpmn, filename) or bpmn
|
||||
|
||||
return bpmn
|
||||
|
||||
def pre_parse_and_validate_signavio(self, bpmn, filename):
|
||||
"""
|
||||
This is the Signavio specific editor hook for pre-parsing and
|
||||
validation.
|
||||
|
||||
A subclass can override this method to provide additional parsing or
|
||||
validation. It should call the parent method first.
|
||||
|
||||
:param bpmn: an lxml tree of the bpmn content
|
||||
|
||||
:param filename: the source file name
|
||||
|
||||
This must return the updated bpmn object (or a replacement)
|
||||
"""
|
||||
self._check_for_disconnected_boundary_events_signavio(bpmn, filename)
|
||||
self._fix_call_activities_signavio(bpmn, filename)
|
||||
return bpmn
|
||||
|
||||
def _check_for_disconnected_boundary_events_signavio(self, bpmn, filename):
|
||||
# signavio sometimes disconnects a BoundaryEvent from its owning task
|
||||
# They then show up as intermediateCatchEvents without any incoming
|
||||
# sequence flows
|
||||
xpath = xpath_eval(bpmn)
|
||||
for catch_event in xpath('.//bpmn:intermediateCatchEvent'):
|
||||
incoming = xpath(
|
||||
'.//bpmn:sequenceFlow[@targetRef="%s"]' %
|
||||
catch_event.get('id'))
|
||||
if not incoming:
|
||||
raise ValidationException(
|
||||
'Intermediate Catch Event has no incoming sequences. '
|
||||
'This might be a Boundary Event that has been '
|
||||
'disconnected.',
|
||||
node=catch_event, filename=filename)
|
||||
|
||||
def _fix_call_activities_signavio(self, bpmn, filename):
|
||||
"""
|
||||
Signavio produces slightly invalid BPMN for call activity nodes... It
|
||||
is supposed to put a reference to the id of the called process in to
|
||||
the calledElement attribute. Instead it stores a string (which is the
|
||||
name of the process - not its ID, in our interpretation) in an
|
||||
extension tag.
|
||||
|
||||
This code gets the name of the 'subprocess reference', finds a process
|
||||
with a matching name, and sets the calledElement attribute to the id of
|
||||
the process.
|
||||
"""
|
||||
for node in xpath_eval(bpmn)(".//bpmn:callActivity"):
|
||||
calledElement = node.get('calledElement', None)
|
||||
if not calledElement:
|
||||
signavioMetaData = xpath_eval(node, extra_ns={
|
||||
'signavio': SIGNAVIO_NS})(
|
||||
'.//signavio:signavioMetaData[@metaKey="entry"]')
|
||||
if not signavioMetaData:
|
||||
raise ValidationException(
|
||||
'No Signavio "Subprocess reference" specified.',
|
||||
node=node, filename=filename)
|
||||
subprocess_reference = one(signavioMetaData).get('metaValue')
|
||||
matches = []
|
||||
for b in list(self.bpmn.values()):
|
||||
for p in xpath_eval(b)(".//bpmn:process"):
|
||||
if (p.get('name', p.get('id', None)) ==
|
||||
subprocess_reference):
|
||||
matches.append(p)
|
||||
if not matches:
|
||||
raise ValidationException(
|
||||
"No matching process definition found for '%s'." %
|
||||
subprocess_reference, node=node, filename=filename)
|
||||
if len(matches) != 1:
|
||||
raise ValidationException(
|
||||
"More than one matching process definition "
|
||||
" found for '%s'." % subprocess_reference, node=node,
|
||||
filename=filename)
|
||||
|
||||
node.set('calledElement', matches[0].get('id'))
|
||||
|
||||
def _call_editor_hook(self, hook, *args, **kwargs):
|
||||
if self.editor:
|
||||
hook_func = getattr(self, "%s_%s" % (hook, self.editor), None)
|
||||
if hook_func:
|
||||
return hook_func(*args, **kwargs)
|
||||
return None
|
||||
|
||||
def package_for_editor_signavio(self, spec, filename):
|
||||
"""
|
||||
Adds the SVG files to the archive for this BPMN file.
|
||||
"""
|
||||
signavio_file = filename[:-len('.bpmn20.xml')] + '.signavio.xml'
|
||||
if os.path.exists(signavio_file):
|
||||
self.write_file_to_package_zip(
|
||||
"src/" + self._get_zip_path(signavio_file), signavio_file)
|
||||
|
||||
f = open(signavio_file, 'r')
|
||||
try:
|
||||
signavio_tree = etree.parse(f)
|
||||
finally:
|
||||
f.close()
|
||||
svg_node = one(signavio_tree.findall('.//svg-representation'))
|
||||
self.write_to_package_zip("%s.svg" % spec.name, svg_node.text)
|
||||
|
||||
def write_meta_data(self):
|
||||
"""
|
||||
Writes the metadata.ini file to the archive.
|
||||
"""
|
||||
config = configparser.ConfigParser()
|
||||
|
||||
config.add_section('MetaData')
|
||||
config.set('MetaData', 'entry_point_process', self.wf_spec.name)
|
||||
if self.editor:
|
||||
config.set('MetaData', 'editor', self.editor)
|
||||
|
||||
for k, v in self.meta_data:
|
||||
config.set('MetaData', k, v)
|
||||
|
||||
if not self.PARSER_CLASS == BpmnParser:
|
||||
config.set('MetaData', 'parser_class_module',
|
||||
inspect.getmodule(self.PARSER_CLASS).__name__)
|
||||
config.set('MetaData', 'parser_class', self.PARSER_CLASS.__name__)
|
||||
|
||||
ini = StringIO()
|
||||
config.write(ini)
|
||||
self.write_to_package_zip(self.METADATA_FILE, ini.getvalue())
|
||||
|
||||
def _get_zip_path(self, filename):
|
||||
p = os.path.abspath(filename)[
|
||||
len(self.input_path_prefix):].replace(os.path.sep, '/')
|
||||
while p.startswith('/'):
|
||||
p = p[1:]
|
||||
return p
|
||||
|
||||
@classmethod
|
||||
def get_version(cls):
|
||||
try:
|
||||
import pkg_resources # part of setuptools
|
||||
version = pkg_resources.require("SpiffWorkflow")[0].version
|
||||
except Exception:
|
||||
version = 'DEV'
|
||||
return version
|
||||
|
||||
@classmethod
|
||||
def create_option_parser(cls):
|
||||
"""
|
||||
Override in subclass if required.
|
||||
"""
|
||||
return OptionParser(
|
||||
usage=("%prog [options] -o <package file> -p "
|
||||
"<entry point process> <input BPMN files ...>"),
|
||||
version="SpiffWorkflow BPMN Packager %s" % (cls.get_version()))
|
||||
|
||||
@classmethod
|
||||
def add_main_options(cls, parser):
|
||||
"""
|
||||
Override in subclass if required.
|
||||
"""
|
||||
parser.add_option("-o", "--output", dest="package_file",
|
||||
help="create the BPMN package in the specified file")
|
||||
parser.add_option("-p", "--process", dest="entry_point_process",
|
||||
help="specify the entry point process")
|
||||
parser.add_option("-c", "--config-file", dest="config_file",
|
||||
help="specify a config file to use")
|
||||
parser.add_option(
|
||||
"-i", "--initialise-config-file", action="store_true",
|
||||
dest="init_config_file", default=False,
|
||||
help="create a new config file from the specified options")
|
||||
|
||||
group = OptionGroup(parser, "BPMN Editor Options",
|
||||
"These options are not required, but may be "
|
||||
" provided to activate special features of "
|
||||
"supported BPMN editors.")
|
||||
group.add_option("--editor", dest="editor",
|
||||
help="editors with special support: signavio")
|
||||
parser.add_option_group(group)
|
||||
|
||||
@classmethod
|
||||
def add_additional_options(cls, parser):
|
||||
"""
|
||||
Override in subclass if required.
|
||||
"""
|
||||
group = OptionGroup(parser, "Target Engine Options",
|
||||
"These options are not required, but may be "
|
||||
"provided if a specific "
|
||||
"BPMN application engine is targeted.")
|
||||
group.add_option("-e", "--target-engine", dest="target_engine",
|
||||
help="target the specified BPMN application engine")
|
||||
group.add_option(
|
||||
"-t", "--target-version", dest="target_engine_version",
|
||||
help="target the specified version of the BPMN application engine")
|
||||
parser.add_option_group(group)
|
||||
|
||||
@classmethod
|
||||
def check_args(cls, config, options, args, parser, package_file=None):
|
||||
"""
|
||||
Override in subclass if required.
|
||||
"""
|
||||
if not args:
|
||||
parser.error("no input files specified")
|
||||
if not (package_file or options.package_file):
|
||||
parser.error("no package file specified")
|
||||
if not options.entry_point_process:
|
||||
parser.error("no entry point process specified")
|
||||
|
||||
@classmethod
|
||||
def merge_options_and_config(cls, config, options, args):
|
||||
"""
|
||||
Override in subclass if required.
|
||||
"""
|
||||
if args:
|
||||
config.set(CONFIG_SECTION_NAME, 'input_files', ','.join(args))
|
||||
elif config.has_option(CONFIG_SECTION_NAME, 'input_files'):
|
||||
for i in config.get(CONFIG_SECTION_NAME, 'input_files').split(','):
|
||||
if not os.path.isabs(i):
|
||||
i = os.path.abspath(
|
||||
os.path.join(os.path.dirname(options.config_file), i))
|
||||
args.append(i)
|
||||
|
||||
cls.merge_option_and_config_str('package_file', config, options)
|
||||
cls.merge_option_and_config_str('entry_point_process', config, options)
|
||||
cls.merge_option_and_config_str('target_engine', config, options)
|
||||
cls.merge_option_and_config_str(
|
||||
'target_engine_version', config, options)
|
||||
cls.merge_option_and_config_str('editor', config, options)
|
||||
|
||||
@classmethod
|
||||
def merge_option_and_config_str(cls, option_name, config, options):
|
||||
"""
|
||||
Utility method to merge an option and config, with the option taking
|
||||
precedence
|
||||
"""
|
||||
|
||||
opt = getattr(options, option_name, None)
|
||||
if opt:
|
||||
config.set(CONFIG_SECTION_NAME, option_name, opt)
|
||||
elif config.has_option(CONFIG_SECTION_NAME, option_name):
|
||||
setattr(options, option_name, config.get(
|
||||
CONFIG_SECTION_NAME, option_name))
|
||||
|
||||
@classmethod
|
||||
def create_meta_data(cls, options, args, parser):
|
||||
"""
|
||||
Override in subclass if required.
|
||||
"""
|
||||
meta_data = []
|
||||
meta_data.append(('spiff_version', cls.get_version()))
|
||||
if options.target_engine:
|
||||
meta_data.append(('target_engine', options.target_engine))
|
||||
if options.target_engine:
|
||||
meta_data.append(
|
||||
('target_engine_version', options.target_engine_version))
|
||||
return meta_data
|
||||
|
||||
@classmethod
|
||||
def main(cls, argv=None, package_file=None):
|
||||
parser = cls.create_option_parser()
|
||||
|
||||
cls.add_main_options(parser)
|
||||
|
||||
cls.add_additional_options(parser)
|
||||
|
||||
(options, args) = parser.parse_args(args=argv)
|
||||
|
||||
config = configparser.ConfigParser()
|
||||
if options.config_file:
|
||||
config.read(options.config_file)
|
||||
if not config.has_section(CONFIG_SECTION_NAME):
|
||||
config.add_section(CONFIG_SECTION_NAME)
|
||||
|
||||
cls.merge_options_and_config(config, options, args)
|
||||
if options.init_config_file:
|
||||
if not options.config_file:
|
||||
parser.error(
|
||||
"no config file specified - cannot initialise config file")
|
||||
f = open(options.config_file, "w")
|
||||
with f:
|
||||
config.write(f)
|
||||
return
|
||||
|
||||
cls.check_args(config, options, args, parser, package_file)
|
||||
|
||||
meta_data = cls.create_meta_data(options, args, parser)
|
||||
|
||||
packager = cls(package_file=package_file or options.package_file,
|
||||
entry_point_process=options.entry_point_process,
|
||||
meta_data=meta_data, editor=options.editor)
|
||||
for a in args:
|
||||
packager.add_bpmn_files_by_glob(a)
|
||||
packager.create_package()
|
||||
|
||||
return packager
|
||||
|
||||
|
||||
def main(packager_class=None):
|
||||
"""
|
||||
:param packager_class: The Packager class to use. Default: Packager.
|
||||
"""
|
||||
|
||||
if not packager_class:
|
||||
packager_class = Packager
|
||||
|
||||
packager_class.main()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
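A programmatic usage sketch for the Packager class above; the command line entry point in main() does essentially the same thing. The file names and the entry point process ID here are placeholders.

    # Illustrative sketch only; file names and process ID are placeholders.
    with open('my_package.zip', 'wb') as package_file:
        packager = Packager(package_file, 'my_entry_point_process',
                            meta_data=[('description', 'example package')])
        packager.add_bpmn_files_by_glob('bpmn/*.bpmn')
        packager.create_package()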
@ -0,0 +1,20 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .workflow import BpmnWorkflowSerializer
|
||||
from .bpmn_converters import BpmnDataConverter
|
|
@ -0,0 +1,345 @@
|
|||
from functools import partial
|
||||
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnDataSpecification
|
||||
|
||||
from .dictionary import DictionaryConverter
|
||||
|
||||
from ..specs.events import SignalEventDefinition, MessageEventDefinition, NoneEventDefinition
|
||||
from ..specs.events import TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition
|
||||
from ..specs.events import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition
|
||||
from ..specs.events.event_definitions import CorrelationProperty, NamedEventDefinition
|
||||
|
||||
from ..specs.BpmnSpecMixin import BpmnSpecMixin, SequenceFlow
|
||||
from ...operators import Attrib, PathAttrib
|
||||
|
||||
|
||||
class BpmnDataConverter(DictionaryConverter):
|
||||
"""
|
||||
The default converter for task and workflow data. It allows some commonly used python objects
|
||||
to be converted to a form that can be serialized with JSON.
|
||||
|
||||
It also serves as a simple example for anyone who needs custom data serialization. If you have
|
||||
custom objects or python objects not included here in your workflow/task data, then you should
|
||||
replace or extend this with one that can handle the contents of your workflow.
|
||||
"""
|
||||
def __init__(self):
|
||||
|
||||
super().__init__()
|
||||
self.register(UUID, lambda v: { 'value': str(v) }, lambda v: UUID(v['value']))
|
||||
self.register(datetime, lambda v: { 'value': v.isoformat() }, lambda v: datetime.fromisoformat(v['value']))
|
||||
self.register(timedelta, lambda v: { 'days': v.days, 'seconds': v.seconds }, lambda v: timedelta(**v))
|
||||
|
||||
def convert(self, obj):
|
||||
self.clean(obj)
|
||||
return super().convert(obj)
|
||||
|
||||
def clean(self, obj):
|
||||
# This removes functions and other callables from task data.
|
||||
# By default we don't want to serialize these
|
||||
if isinstance(obj, dict):
|
||||
items = [ (k, v) for k, v in obj.items() ]
|
||||
for key, value in items:
|
||||
if callable(value):
|
||||
del obj[key]
|
||||
|
||||
class BpmnDataSpecificationConverter:
|
||||
|
||||
@staticmethod
|
||||
def to_dict(data_spec):
|
||||
return { 'name': data_spec.name, 'description': data_spec.description }
|
||||
|
||||
@staticmethod
|
||||
def from_dict(dct):
|
||||
return BpmnDataSpecification(**dct)
|
||||
|
||||
|
||||
|
||||
class BpmnTaskSpecConverter(DictionaryConverter):
|
||||
"""
|
||||
This is the base Task Spec Converter.
|
||||
|
||||
It contains methods for parsing generic and BPMN task spec attributes.
|
||||
|
||||
If you have extended any of the BPMN tasks with custom functionality, you'll need to
|
||||
implement a converter for those task spec types. You'll need to implement the `to_dict` and
|
||||
`from_dict` methods on any inheriting classes.
|
||||
|
||||
The default task spec converters are in `task_converters`; the `camunda` and `dmn`
|
||||
serialization packages contain other examples.
|
||||
"""
|
||||
|
||||
def __init__(self, spec_class, data_converter, typename=None):
|
||||
"""The default task spec converter. This will generally be registered with a workflow
|
||||
spec converter.
|
||||
|
||||
Task specs can contain arbitrary data, though none of the default BPMN tasks do. We
|
||||
may remove this functionality in the future. Therefore, the data_converter can be
|
||||
`None`; if this is the case, task spec attributes that can contain arbitrary data will be
|
||||
ignored.
|
||||
|
||||
:param spec_class: the class defining the task type
|
||||
:param data_converter: a converter for custom data (can be None)
|
||||
:param typename: an optional typename for the object registration
|
||||
"""
|
||||
super().__init__()
|
||||
self.spec_class = spec_class
|
||||
self.data_converter = data_converter
|
||||
self.typename = typename if typename is not None else spec_class.__name__
|
||||
|
||||
event_definitions = [ NoneEventDefinition, CancelEventDefinition, TerminateEventDefinition,
|
||||
SignalEventDefinition, MessageEventDefinition, ErrorEventDefinition, EscalationEventDefinition,
|
||||
TimerEventDefinition, CycleTimerEventDefinition ]
|
||||
|
||||
for event_definition in event_definitions:
|
||||
self.register(
|
||||
event_definition,
|
||||
self.event_definition_to_dict,
|
||||
partial(self.event_defintion_from_dict, event_definition)
|
||||
)
|
||||
|
||||
self.register(SequenceFlow, self.sequence_flow_to_dict, self.sequence_flow_from_dict)
|
||||
self.register(Attrib, self.attrib_to_dict, partial(self.attrib_from_dict, Attrib))
|
||||
self.register(PathAttrib, self.attrib_to_dict, partial(self.attrib_from_dict, PathAttrib))
|
||||
self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict)
|
||||
|
||||
def to_dict(self, spec):
|
||||
"""
|
||||
The convert method that will be called when a Task Spec Converter is registered with a
|
||||
Workflow Spec Converter.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def from_dict(self, dct):
|
||||
"""
|
||||
The restore method that will be called when a Task Spec Converter is registered with a
|
||||
Workflow Spec Converter.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_default_attributes(self, spec):
|
||||
"""Extracts the default Spiff attributes from a task spec.
|
||||
|
||||
:param spec: the task spec to be converted
|
||||
|
||||
Returns:
|
||||
a dictionary of standard task spec attributes
|
||||
"""
|
||||
dct = {
|
||||
'id': spec.id,
|
||||
'name': spec.name,
|
||||
'description': spec.description,
|
||||
'manual': spec.manual,
|
||||
'internal': spec.internal,
|
||||
'lookahead': spec.lookahead,
|
||||
'inputs': [task.name for task in spec.inputs],
|
||||
'outputs': [task.name for task in spec.outputs],
|
||||
}
|
||||
# This stuff is also all defined in the base task spec, but can contain data, so we need
|
||||
# our data serializer. I think we should try to get this stuff out of the base task spec.
|
||||
if self.data_converter is not None:
|
||||
dct['data'] = self.data_converter.convert(spec.data)
|
||||
dct['defines'] = self.data_converter.convert(spec.defines)
|
||||
dct['pre_assign'] = self.data_converter.convert(spec.pre_assign)
|
||||
dct['post_assign'] = self.data_converter.convert(spec.post_assign)
|
||||
|
||||
return dct
|
||||
|
||||
def get_bpmn_attributes(self, spec):
|
||||
"""Extracts the attributes added by the `BpmnSpecMixin` class.
|
||||
|
||||
:param spec: the task spec to be converted
|
||||
|
||||
Returns:
|
||||
a dictionary of BPMN task spec attributes
|
||||
"""
|
||||
return {
|
||||
'lane': spec.lane,
|
||||
'documentation': spec.documentation,
|
||||
'loopTask': spec.loopTask,
|
||||
'position': spec.position,
|
||||
'outgoing_sequence_flows': dict(
|
||||
(k, self.convert(v)) for k, v in spec.outgoing_sequence_flows.items()
|
||||
),
|
||||
'outgoing_sequence_flows_by_id': dict(
|
||||
(k, self.convert(v)) for k, v in spec.outgoing_sequence_flows_by_id.items()
|
||||
),
|
||||
'data_input_associations': [ self.convert(obj) for obj in spec.data_input_associations ],
|
||||
'data_output_associations': [ self.convert(obj) for obj in spec.data_output_associations ],
|
||||
}
|
||||
|
||||
def get_join_attributes(self, spec):
|
||||
"""Extracts attributes for task specs that inherit from `Join`.
|
||||
|
||||
:param spec: the task spec to be converted
|
||||
|
||||
Returns:
|
||||
a dictionary of `Join` task spec attributes
|
||||
"""
|
||||
return {
|
||||
'split_task': spec.split_task,
|
||||
'threshold': spec.threshold,
|
||||
'cancel': spec.cancel_remaining,
|
||||
}
|
||||
|
||||
def get_subworkflow_attributes(self, spec):
|
||||
"""Extracts attributes for task specs that inherit from `SubWorkflowTask`.
|
||||
|
||||
:param spec: the task spec to be converted
|
||||
|
||||
Returns:
|
||||
a dictionary of subworkflow task spec attributes
|
||||
"""
|
||||
return {'spec': spec.spec}
|
||||
|
||||
def task_spec_from_dict(self, dct):
|
||||
"""
|
||||
Creates a task spec based on the supplied dictionary. It handles setting the default
|
||||
task spec attributes as well as attributes added by `BpmnSpecMixin`.
|
||||
|
||||
:param dct: the dictionary to create the task spec from
|
||||
|
||||
Returns:
|
||||
a restored task spec
|
||||
"""
|
||||
internal = dct.pop('internal')
|
||||
inputs = dct.pop('inputs')
|
||||
outputs = dct.pop('outputs')
|
||||
|
||||
spec = self.spec_class(**dct)
|
||||
spec.internal = internal
|
||||
spec.inputs = inputs
|
||||
spec.outputs = outputs
|
||||
spec.id = dct['id']
|
||||
|
||||
if self.data_converter is not None:
|
||||
spec.data = self.data_converter.restore(dct.get('data', {}))
|
||||
spec.defines = self.data_converter.restore(dct.get('defines', {}))
|
||||
spec.pre_assign = self.data_converter.restore(dct.get('pre_assign', {}))
|
||||
spec.post_assign = self.data_converter.restore(dct.get('post_assign', {}))
|
||||
|
||||
if isinstance(spec, BpmnSpecMixin):
|
||||
spec.documentation = dct.pop('documentation', None)
|
||||
spec.lane = dct.pop('lane', None)
|
||||
spec.loopTask = dct.pop('loopTask', False)
|
||||
spec.outgoing_sequence_flows = self.restore(dct.pop('outgoing_sequence_flows', {}))
|
||||
spec.outgoing_sequence_flows_by_id = self.restore(dct.pop('outgoing_sequence_flows_by_id', {}))
|
||||
spec.data_input_associations = self.restore(dct.pop('data_input_associations', []))
|
||||
spec.data_output_associations = self.restore(dct.pop('data_output_associations', []))
|
||||
|
||||
return spec
|
||||
|
||||
def event_definition_to_dict(self, event_definition):
|
||||
"""
|
||||
Converts a BPMN event definition to a dict. It will not typically be called directly,
|
||||
but via `convert` and will convert any event type supported by Spiff.
|
||||
|
||||
:param event_definition: the event_definition to be converted.
|
||||
|
||||
Returns:
|
||||
a dictionary representation of an event definition
|
||||
"""
|
||||
dct = {'internal': event_definition.internal, 'external': event_definition.external}
|
||||
|
||||
if isinstance(event_definition, NamedEventDefinition):
|
||||
dct['name'] = event_definition.name
|
||||
if isinstance(event_definition, MessageEventDefinition):
|
||||
dct['correlation_properties'] = [prop.__dict__ for prop in event_definition.correlation_properties]
|
||||
if isinstance(event_definition, TimerEventDefinition):
|
||||
dct['label'] = event_definition.label
|
||||
dct['dateTime'] = event_definition.dateTime
|
||||
if isinstance(event_definition, CycleTimerEventDefinition):
|
||||
dct['label'] = event_definition.label
|
||||
dct['cycle_definition'] = event_definition.cycle_definition
|
||||
if isinstance(event_definition, ErrorEventDefinition):
|
||||
dct['error_code'] = event_definition.error_code
|
||||
if isinstance(event_definition, EscalationEventDefinition):
|
||||
dct['escalation_code'] = event_definition.escalation_code
|
||||
|
||||
return dct
|
||||
|
||||
def event_defintion_from_dict(self, definition_class, dct):
|
||||
"""Restores an event definition. It will not typically be called directly, but via
|
||||
`restore` and will restore any BPMN event type supported by Spiff.
|
||||
|
||||
:param definition_class: the class that will be used to create the object
|
||||
:param dct: the event definition attributes
|
||||
|
||||
Returns:
|
||||
an `EventDefinition` object
|
||||
"""
|
||||
internal, external = dct.pop('internal'), dct.pop('external')
|
||||
if 'correlation_properties' in dct:
|
||||
dct['correlation_properties'] = [CorrelationProperty(**prop) for prop in dct['correlation_properties']]
|
||||
event_definition = definition_class(**dct)
|
||||
event_definition.internal = internal
|
||||
event_definition.external = external
|
||||
return event_definition
|
||||
|
||||
def sequence_flow_to_dict(self, flow):
|
||||
return {
|
||||
'id': flow.id,
|
||||
'name': flow.name,
|
||||
'documentation': flow.documentation,
|
||||
'target_task_spec': flow.target_task_spec.name
|
||||
}
|
||||
|
||||
def sequence_flow_from_dict(self, dct):
|
||||
return SequenceFlow(**dct)
|
||||
|
||||
def attrib_to_dict(self, attrib):
|
||||
return { 'name': attrib.name }
|
||||
|
||||
def attrib_from_dict(self, attrib_class, dct):
|
||||
return attrib_class(dct['name'])
|
||||
|
||||
|
||||
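The subclassing pattern described in the BpmnTaskSpecConverter docstring above, sketched for a hypothetical custom task spec (MyScriptTask and its `script` attribute are assumed names); only the helper methods defined above are used.

    # Illustrative sketch only; MyScriptTask and its `script` attribute are assumed.
    class MyScriptTaskConverter(BpmnTaskSpecConverter):

        def __init__(self, data_converter=None):
            super().__init__(MyScriptTask, data_converter)

        def to_dict(self, spec):
            dct = self.get_default_attributes(spec)
            dct.update(self.get_bpmn_attributes(spec))
            dct['script'] = spec.script
            return dct

        def from_dict(self, dct):
            script = dct.pop('script')
            spec = self.task_spec_from_dict(dct)
            spec.script = script
            return spec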
class BpmnWorkflowSpecConverter(DictionaryConverter):
|
||||
"""
|
||||
This is the base converter for a BPMN workflow spec.
|
||||
|
||||
It will register converters for the task spec types contained in the workflow, as well as
|
||||
the workflow spec class itself.
|
||||
|
||||
This class can be extended if you implement a custom workflow spec type. See the converter
|
||||
in `workflow_spec_converter` for an example.
|
||||
"""
|
||||
|
||||
def __init__(self, spec_class, task_spec_converters, data_converter=None):
|
||||
"""
|
||||
Converter for a BPMN workflow spec class.
|
||||
|
||||
The `to_dict` and `from_dict` methods of the given task spec converter classes will
|
||||
be registered, so that they can be restored automatically.
|
||||
|
||||
The data_converter is applied to task *spec* data, not task data, and may be `None`. See
|
||||
`BpmnTaskSpecConverter` for more discussion.
|
||||
|
||||
:param spec_class: the workflow spec class
|
||||
:param task_spec_converters: a list of `BpmnTaskSpecConverter` classes
|
||||
:param data_converter: an optional data converter
|
||||
"""
|
||||
super().__init__()
|
||||
self.spec_class = spec_class
|
||||
self.data_converter = data_converter
|
||||
|
||||
self.register(spec_class, self.to_dict, self.from_dict)
|
||||
for converter in task_spec_converters:
|
||||
self.register(converter.spec_class, converter.to_dict, converter.from_dict, converter.typename)
|
||||
self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict)
|
||||
|
||||
def to_dict(self, spec):
|
||||
"""
|
||||
The convert method that will be called when a Workflow Spec Converter is registered with a
|
||||
Workflow Converter.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def from_dict(self, dct):
|
||||
"""
|
||||
The restore method that will be called when a Workflow Spec Converter is registered with a
|
||||
Workflow Converter.
|
||||
"""
|
||||
raise NotImplementedError
|
|
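A small round-trip sketch for the BpmnDataConverter defined at the top of this file. The registered UUID, datetime and timedelta handlers convert values to plain dictionaries, clean() drops callables, and restore() (inherited from DictionaryConverter) reverses convert().

    # Illustrative sketch only.
    converter = BpmnDataConverter()

    data = {
        'id': UUID('12345678-1234-5678-1234-567812345678'),
        'started': datetime(2022, 1, 1, 12, 0),
        'window': timedelta(days=1),
        'callback': print,  # removed by clean() before conversion
    }

    serializable = converter.convert(data)   # JSON-friendly structures
    original = converter.restore(serializable)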
@ -0,0 +1,259 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from ...camunda.specs.UserTask import UserTask
|
||||
from ...dmn.engine.DMNEngine import DMNEngine
|
||||
from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
from ...dmn.specs.model import DecisionTable
|
||||
from ...serializer.dict import DictionarySerializer
|
||||
from ...util.impl import get_class
|
||||
from ..specs.BpmnSpecMixin import SequenceFlow
|
||||
from ..specs.ExclusiveGateway import ExclusiveGateway
|
||||
from ..specs.MultiInstanceTask import MultiInstanceTask
|
||||
from ..specs.ScriptTask import ScriptTask
|
||||
from ..specs.SubWorkflowTask import SubWorkflowTask
|
||||
|
||||
|
||||
class BPMNDictionarySerializer(DictionarySerializer):
|
||||
|
||||
def serialize_task_spec(self, spec):
|
||||
s_state = super().serialize_task_spec(spec)
|
||||
|
||||
if hasattr(spec,'documentation'):
|
||||
s_state['documentation'] = spec.documentation
|
||||
if hasattr(spec,'extensions'):
|
||||
s_state['extensions'] = self.serialize_dict(spec.extensions)
|
||||
if hasattr(spec,'lane'):
|
||||
s_state['lane'] = spec.lane
|
||||
|
||||
if hasattr(spec,'outgoing_sequence_flows'):
|
||||
s_state['outgoing_sequence_flows'] = {x:spec.outgoing_sequence_flows[x].serialize() for x in
|
||||
spec.outgoing_sequence_flows.keys()}
|
||||
s_state['outgoing_sequence_flows_by_id'] = {x:spec.outgoing_sequence_flows_by_id[x].serialize() for x in
|
||||
spec.outgoing_sequence_flows_by_id.keys()}
|
||||
|
||||
# Note: Events are not serialized; this is documented in
|
||||
# the TaskSpec API docs.
|
||||
|
||||
return s_state
|
||||
|
||||
def deserialize_task_spec(self, wf_spec, s_state, spec):
|
||||
spec = super().deserialize_task_spec(wf_spec, s_state, spec)
|
||||
# We could use s_state.get('extensions', {}) directly in the deserialization,
|
||||
# but many tasks have no extensions on them.
|
||||
if s_state.get('extensions',None) != None:
|
||||
spec.extensions = self.deserialize_dict(s_state['extensions'])
|
||||
if 'documentation' in s_state.keys():
|
||||
spec.documentation = s_state['documentation']
|
||||
|
||||
if 'lane' in s_state.keys():
|
||||
spec.lane = s_state.get('lane',None)
|
||||
if s_state.get('outgoing_sequence_flows',None):
|
||||
spec.outgoing_sequence_flows = s_state.get('outgoing_sequence_flows', {})
|
||||
spec.outgoing_sequence_flows_by_id = s_state.get('outgoing_sequence_flows_by_id', {})
|
||||
|
||||
return spec
|
||||
|
||||
def serialize_exclusive_gateway(self, spec):
|
||||
s_state = self.serialize_multi_choice(spec)
|
||||
s_state['default_task_spec'] = spec.default_task_spec
|
||||
return s_state
|
||||
|
||||
def deserialize_exclusive_gateway(self, wf_spec, s_state):
|
||||
spec = ExclusiveGateway(wf_spec, s_state['name'])
|
||||
self.deserialize_multi_choice(wf_spec, s_state, spec=spec)
|
||||
spec.default_task_spec = s_state['default_task_spec']
|
||||
return spec
|
||||
|
||||
def serialize_script_task(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['script'] = spec.script
|
||||
return s_state
|
||||
|
||||
def deserialize_script_task(self, wf_spec, s_state):
|
||||
spec = ScriptTask(wf_spec, s_state['name'], s_state['script'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_subworkflow_task(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['wf_class'] = spec.wf_class.__module__ + "." + spec.wf_class.__name__
|
||||
s_state['spec'] = self.serialize_workflow_spec(spec.spec)
|
||||
return s_state
|
||||
|
||||
def deserialize_subworkflow_task(self, wf_spec, s_state, cls):
|
||||
spec = cls(wf_spec, s_state['name'])
|
||||
spec.wf_class = get_class(s_state['wf_class'])
|
||||
if 'spec_name' in s_state:
|
||||
s_state['spec'] = self.SPEC_STATES[s_state['spec_name']]
|
||||
spec.spec = self.deserialize_workflow_spec(s_state['spec'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_generic_event(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
if spec.event_definition:
|
||||
s_state['event_definition'] = spec.event_definition.serialize()
|
||||
else:
|
||||
s_state['event_definition'] = None
|
||||
return s_state
|
||||
|
||||
def deserialize_generic_event(self, wf_spec, s_state, cls):
|
||||
if s_state.get('event_definition',None):
|
||||
evtcls = get_class(s_state['event_definition']['classname'])
|
||||
event = evtcls.deserialize(s_state['event_definition'])
|
||||
else:
|
||||
event = None
|
||||
spec = cls(wf_spec, s_state['name'], event)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_boundary_event_parent(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['main_child_task_spec'] = spec.main_child_task_spec.id
|
||||
return s_state
|
||||
|
||||
def deserialize_boundary_event_parent(self, wf_spec, s_state, cls):
|
||||
|
||||
main_child_task_spec = wf_spec.get_task_spec_from_id(s_state['main_child_task_spec'])
|
||||
spec = cls(wf_spec, s_state['name'], main_child_task_spec)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_boundary_event(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
if spec.cancel_activity:
|
||||
s_state['cancel_activity'] = spec.cancel_activity
|
||||
else:
|
||||
s_state['cancel_activity'] = None
|
||||
if spec.event_definition:
|
||||
s_state['event_definition'] = spec.event_definition.serialize()
|
||||
else:
|
||||
s_state['event_definition'] = None
|
||||
return s_state
|
||||
|
||||
def deserialize_boundary_event(self, wf_spec, s_state, cls):
|
||||
cancel_activity = s_state.get('cancel_activity',None)
|
||||
if s_state['event_definition']:
|
||||
eventclass = get_class(s_state['event_definition']['classname'])
|
||||
event = eventclass.deserialize(s_state['event_definition'])
|
||||
else:
|
||||
event = None
|
||||
spec = cls(wf_spec, s_state['name'], cancel_activity=cancel_activity,event_definition=event)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_user_task(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['form'] = spec.form
|
||||
return s_state
|
||||
|
||||
def deserialize_user_task(self, wf_spec, s_state):
|
||||
spec = UserTask(wf_spec, s_state['name'], s_state['form'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
|
||||
def serialize_business_rule_task(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
dictrep = spec.dmnEngine.decision_table.serialize()
|
||||
# future
|
||||
s_state['dmn'] = dictrep
|
||||
return s_state
|
||||
|
||||
def deserialize_business_rule_task(self, wf_spec, s_state):
|
||||
dt = DecisionTable(None,None)
|
||||
dt.deserialize(s_state['dmn'])
|
||||
dmn_engine = DMNEngine(dt)
|
||||
spec = BusinessRuleTask(wf_spec, s_state['name'], dmn_engine)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_multi_instance(self, spec):
|
||||
s_state = super().serialize_multi_instance(spec)
|
||||
# here we need to add in all of the things that would get serialized
|
||||
# for other classes that the MultiInstance could be -
|
||||
#
|
||||
if hasattr(spec,'form'):
|
||||
s_state['form'] = spec.form
|
||||
|
||||
if isinstance(spec,MultiInstanceTask):
|
||||
s_state['collection'] = self.serialize_arg(spec.collection)
|
||||
s_state['elementVar'] = self.serialize_arg(spec.elementVar)
|
||||
s_state['completioncondition'] = self.serialize_arg(spec.completioncondition)
|
||||
s_state['isSequential'] = self.serialize_arg(spec.isSequential)
|
||||
s_state['loopTask'] = self.serialize_arg(spec.loopTask)
|
||||
if (hasattr(spec,'expanded')):
|
||||
s_state['expanded'] = self.serialize_arg(spec.expanded)
|
||||
if isinstance(spec,BusinessRuleTask):
|
||||
br_state = self.serialize_business_rule_task(spec)
|
||||
s_state['dmn'] = br_state['dmn']
|
||||
if isinstance(spec, ScriptTask):
|
||||
br_state = self.serialize_script_task(spec)
|
||||
s_state['script'] = br_state['script']
|
||||
if isinstance(spec, SubWorkflowTask):
|
||||
br_state = self.serialize_subworkflow(spec)
|
||||
s_state['wf_class'] = br_state['wf_class']
|
||||
s_state['spec'] = br_state['spec']
|
||||
|
||||
return s_state
|
||||
|
||||
def deserialize_multi_instance(self, wf_spec, s_state, cls=None):
|
||||
cls = super().deserialize_multi_instance(wf_spec, s_state, cls)
|
||||
if isinstance(cls,MultiInstanceTask):
|
||||
cls.isSequential = self.deserialize_arg(s_state['isSequential'])
|
||||
cls.loopTask = self.deserialize_arg(s_state['loopTask'])
|
||||
cls.elementVar = self.deserialize_arg(s_state['elementVar'])
|
||||
cls.completioncondition = self.deserialize_arg(s_state['completioncondition'])
|
||||
cls.collection = self.deserialize_arg(s_state['collection'])
|
||||
if s_state.get('expanded',None):
|
||||
cls.expanded = self.deserialize_arg(s_state['expanded'])
|
||||
if isinstance(cls,BusinessRuleTask):
|
||||
dt = DecisionTable(None,None)
|
||||
dt.deserialize(s_state['dmn'])
|
||||
dmn_engine = DMNEngine(dt)
|
||||
cls.dmnEngine=dmn_engine
|
||||
if isinstance(cls, ScriptTask):
|
||||
cls.script = s_state['script']
|
||||
if isinstance(cls, SubWorkflowTask):
|
||||
cls.wf_class = get_class(s_state['wf_class'])
|
||||
cls.spec = self.deserialize_workflow_spec(s_state['spec'])
|
||||
|
||||
if s_state.get('form',None):
|
||||
cls.form = s_state['form']
|
||||
|
||||
return cls
|
||||
|
||||
def _deserialize_workflow_spec_task_spec(self, spec, task_spec, name):
|
||||
if hasattr(task_spec,'outgoing_sequence_flows'):
|
||||
for entry,value in task_spec.outgoing_sequence_flows.items():
|
||||
task_spec.outgoing_sequence_flows[entry] = \
|
||||
SequenceFlow(value['id'],
|
||||
value['name'],
|
||||
value['documentation'],
|
||||
spec.get_task_spec_from_id(value['target_task_spec']))
|
||||
for entry, value in task_spec.outgoing_sequence_flows_by_id.items():
|
||||
task_spec.outgoing_sequence_flows_by_id[entry] = \
|
||||
SequenceFlow(value['id'],
|
||||
value['name'],
|
||||
value['documentation'],
|
||||
spec.get_task_spec_from_id(value['target_task_spec']))
|
||||
super()._deserialize_workflow_spec_task_spec(spec, task_spec, name)
|
||||
|
||||
def _prevtaskclass_bases(self, oldtask):
|
||||
return (MultiInstanceTask, oldtask)
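# A brief usage sketch, assuming `wf_spec` is a parsed BPMN workflow spec; this
# legacy serializer predates the converter-based BpmnWorkflowSerializer below.
serializer = BPMNDictionarySerializer()
s_state = serializer.serialize_workflow_spec(wf_spec)
restored_spec = serializer.deserialize_workflow_spec(s_state)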
|
|
@ -0,0 +1,105 @@
|
|||
from functools import partial
|
||||
|
||||
class DictionaryConverter:
|
||||
"""
|
||||
This is a base class used to convert BPMN specs, workflows, tasks, and data to
|
||||
dictionaries of JSON-serializable objects. Actual serialization is done as the
|
||||
very last step by other classes.
|
||||
|
||||
This class allows you to register to_dict and from_dict functions for non-JSON-
|
||||
serializable objects.
|
||||
|
||||
When an object is passed into `convert`, it will call the supplied to_dict
|
||||
function on any classes that have been registered. The supplied to_dict function
|
||||
must return a dictionary. The object's `typename` will be added to this dictionary
|
||||
by the converter.
|
||||
|
||||
The (unqualified) class name will be used as the `typename` if one is not supplied.
|
||||
You can optionally supply your own names (you'll need to do this if you have
|
||||
identically named classes in multiple packages).
|
||||
|
||||
When a dictionary is passed into `restore`, it will be checked for a `typename` key.
|
||||
If a registered `typename` is found, the supplied from_dict function will be
|
||||
called. Unrecognized objects will be returned as-is.
|
||||
|
||||
For a simple example of how to use this class, see the `BpmnDataConverter` in
|
||||
`bpmn_converters`.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.convert_to_dict = { }
|
||||
self.convert_from_dict = { }
|
||||
self.typenames = { }
|
||||
|
||||
def register(self, cls, to_dict, from_dict, typename=None):
|
||||
"""Register a conversion/restoration.
|
||||
|
||||
The `to_dict` function must return a dictionary; if no `typename` is given,
|
||||
the unqualified class name will be used.
|
||||
|
||||
:param cls: the class that will be converted/restored
|
||||
:param to_dict: a function that will be called with the object as an argument
|
||||
:param from_dict: a function that restores the object from the dict
|
||||
:param typename: an optional typename for identifying the converted object
|
||||
"""
|
||||
typename = cls.__name__ if typename is None else typename
|
||||
self.typenames[cls] = typename
|
||||
self.convert_to_dict[typename] = partial(self.obj_to_dict, typename, to_dict)
|
||||
self.convert_from_dict[typename] = partial(self.obj_from_dict, from_dict)
|
||||
|
||||
@staticmethod
|
||||
def obj_to_dict(typename, func, obj):
|
||||
dct = func(obj)
|
||||
dct.update({'typename': typename})
|
||||
return dct
|
||||
|
||||
@staticmethod
|
||||
def obj_from_dict(func, dct):
|
||||
return func(dct)
|
||||
|
||||
def convert(self, obj):
|
||||
"""
|
||||
This is the public conversion method. It will be applied to dictionary
|
||||
values, list items, and the object itself, applying the to_dict functions
|
||||
of any registered type to the objects, or return the object unchanged if
|
||||
it is not recognized.
|
||||
|
||||
:param obj: the object to be converted
|
||||
|
||||
Returns:
|
||||
the dictionary representation for registered objects or the original
|
||||
for unregistered objects
|
||||
"""
|
||||
typename = self.typenames.get(obj.__class__)
|
||||
if typename in self.convert_to_dict:
|
||||
to_dict = self.convert_to_dict.get(typename)
|
||||
return to_dict(obj)
|
||||
elif isinstance(obj, dict):
|
||||
return dict((k, self.convert(v)) for k, v in obj.items())
|
||||
elif isinstance(obj, (list, tuple, set)):
|
||||
return obj.__class__([ self.convert(item) for item in obj ])
|
||||
else:
|
||||
return obj
|
||||
|
||||
def restore(self, val):
|
||||
"""
|
||||
This is the public restoration method. It will be applied to dictionary
|
||||
values, list items, and the value itself, checking for a `typename` key and
|
||||
applying the from_dict function of any registered type, or return the value
|
||||
unchanged if it is not recognized.
|
||||
|
||||
:param val: the value to be converted
|
||||
|
||||
Returns:
|
||||
the restored object for registered objects or the original for
|
||||
unregistered values
|
||||
"""
|
||||
if isinstance(val, dict) and 'typename' in val:
|
||||
from_dict = self.convert_from_dict.get(val.pop('typename'))
|
||||
return from_dict(val)
|
||||
elif isinstance(val, dict):
|
||||
return dict((k, self.restore(v)) for k, v in val.items())
|
||||
elif isinstance(val, (list, tuple, set)):
|
||||
return val.__class__([ self.restore(item) for item in val ])
|
||||
else:
|
||||
return val
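# A small round-trip sketch; the Coordinate class and its conversion functions
# are hypothetical and exist only for illustration.
class Coordinate:
    def __init__(self, x, y):
        self.x, self.y = x, y

converter = DictionaryConverter()
converter.register(
    Coordinate,
    to_dict=lambda obj: {'x': obj.x, 'y': obj.y},
    from_dict=lambda dct: Coordinate(dct['x'], dct['y']),
)
dct = converter.convert({'points': [Coordinate(1, 2)]})
# -> {'points': [{'x': 1, 'y': 2, 'typename': 'Coordinate'}]}
points = converter.restore(dct)   # -> {'points': [<Coordinate instance>]}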
|
|
@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
import json
|
||||
from ..serializer.dict import BPMNDictionarySerializer
|
||||
from ...camunda.specs.UserTask import Form
|
||||
from ...serializer.json import JSONSerializer
|
||||
|
||||
class BPMNJSONSerializer(BPMNDictionarySerializer, JSONSerializer):
|
||||
|
||||
def _object_hook(self, dct):
|
||||
if '__form__' in dct:
|
||||
return Form(init=json.loads(dct['__form__']))
|
||||
|
||||
return super()._object_hook(dct)
|
||||
|
||||
def _default(self, obj):
|
||||
if isinstance(obj,Form):
|
||||
return {'__form__': json.dumps(obj, default=lambda o:
|
||||
self._jsonableHandler(o))}
|
||||
|
||||
return super()._default(obj)
|
|
@ -0,0 +1,310 @@
|
|||
from uuid import UUID
|
||||
|
||||
from .bpmn_converters import BpmnTaskSpecConverter
|
||||
|
||||
from ...specs import StartTask
|
||||
from ...specs.Simple import Simple
|
||||
from ...specs.LoopResetTask import LoopResetTask
|
||||
|
||||
from ..specs.BpmnProcessSpec import _EndJoin
|
||||
from ..specs.BpmnSpecMixin import _BpmnCondition
|
||||
|
||||
from ..specs.NoneTask import NoneTask
|
||||
from ..specs.UserTask import UserTask
|
||||
from ..specs.ManualTask import ManualTask
|
||||
from ..specs.ScriptTask import ScriptTask
|
||||
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess
|
||||
|
||||
from ..specs.ExclusiveGateway import ExclusiveGateway
|
||||
from ..specs.InclusiveGateway import InclusiveGateway
|
||||
from ..specs.ParallelGateway import ParallelGateway
|
||||
|
||||
from ..specs.events import StartEvent, EndEvent, BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
|
||||
from ..specs.events.IntermediateEvent import _BoundaryEventParent, SendTask, ReceiveTask
|
||||
|
||||
from ..workflow import BpmnWorkflow
|
||||
|
||||
|
||||
class SimpleTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(Simple, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class StartTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(StartTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
class LoopResetTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(LoopResetTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
# Maybe I should add this to the base task converter, but I'm trying to keep it free of
|
||||
# anything but task related conversions
|
||||
dct['destination_id'] = str(spec.destination_id)
|
||||
dct['destination_spec_name'] = spec.destination_spec_name
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
spec = self.task_spec_from_dict(dct)
|
||||
spec.destination_id = UUID(spec.destination_id)
|
||||
return spec
|
||||
|
||||
class EndJoinConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(_EndJoin, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class NoneTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(NoneTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class UserTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(UserTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class ManualTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(ManualTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class ScriptTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(ScriptTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct['script'] = spec.script
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class CallActivityTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(CallActivity, data_converter, typename)
|
||||
self.wf_class = BpmnWorkflow
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct.update(self.get_subworkflow_attributes(spec))
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
dct['subworkflow_spec'] = dct.pop('spec')
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class TransactionSubprocessTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(TransactionSubprocess, data_converter, typename)
|
||||
self.wf_class = BpmnWorkflow
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct.update(self.get_subworkflow_attributes(spec))
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
dct['subworkflow_spec'] = dct.pop('spec')
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class ExclusiveGatewayConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(ExclusiveGateway, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct['default_task_spec'] = spec.default_task_spec
|
||||
dct['cond_task_specs'] = [ self.bpmn_condition_to_dict(cond) for cond in spec.cond_task_specs ]
|
||||
dct['choice'] = spec.choice
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
conditions = dct.pop('cond_task_specs')
|
||||
default_task_spec = dct.pop('default_task_spec')
|
||||
spec = self.task_spec_from_dict(dct)
|
||||
spec.cond_task_specs = [ self.bpmn_condition_from_dict(cond) for cond in conditions ]
|
||||
spec.default_task_spec = default_task_spec
|
||||
return spec
|
||||
|
||||
def bpmn_condition_from_dict(self, dct):
|
||||
return (_BpmnCondition(dct['condition']), dct['task_spec'])
|
||||
|
||||
def bpmn_condition_to_dict(self, condition):
|
||||
|
||||
expr, task_spec = condition
|
||||
return {
|
||||
'condition': expr.args[0],
|
||||
'task_spec': task_spec
|
||||
}
|
||||
|
||||
class InclusiveGatewayConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(InclusiveGateway, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct.update(self.get_join_attributes(spec))
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class ParallelGatewayConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(ParallelGateway, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct.update(self.get_join_attributes(spec))
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class EventConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, spec_class, data_converter, typename):
|
||||
super().__init__(spec_class, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct['event_definition'] = self.convert(spec.event_definition)
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
dct['event_definition'] = self.restore(dct['event_definition'])
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class StartEventConverter(EventConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(StartEvent, data_converter, typename)
|
||||
|
||||
|
||||
class EndEventConverter(EventConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(EndEvent, data_converter, typename)
|
||||
|
||||
|
||||
class IntermediateCatchEventConverter(EventConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(IntermediateCatchEvent, data_converter, typename)
|
||||
|
||||
|
||||
class ReceiveTaskConverter(EventConverter):
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(ReceiveTask, data_converter, typename)
|
||||
|
||||
|
||||
class IntermediateThrowEventConverter(EventConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(IntermediateThrowEvent, data_converter, typename)
|
||||
|
||||
|
||||
class SendTaskConverter(EventConverter):
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(SendTask, data_converter, typename)
|
||||
|
||||
|
||||
class BoundaryEventConverter(EventConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(BoundaryEvent, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = super().to_dict(spec)
|
||||
dct['cancel_activity'] = spec.cancel_activity
|
||||
return dct
|
||||
|
||||
|
||||
class BoundaryEventParentConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(_BoundaryEventParent, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct['main_child_task_spec'] = spec.main_child_task_spec.name
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
return self.task_spec_from_dict(dct)
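# A sketch of a converter for a hypothetical custom task spec; `MyCustomTask`
# and its `my_option` attribute are assumptions made for illustration only.
class MyCustomTaskConverter(BpmnTaskSpecConverter):

    def __init__(self, data_converter=None, typename=None):
        super().__init__(MyCustomTask, data_converter, typename)

    def to_dict(self, spec):
        dct = self.get_default_attributes(spec)
        dct.update(self.get_bpmn_attributes(spec))
        dct['my_option'] = spec.my_option   # the one attribute the defaults don't cover
        return dct

    def from_dict(self, dct):
        my_option = dct.pop('my_option')
        spec = self.task_spec_from_dict(dct)
        spec.my_option = my_option
        return spec

# Such a converter would typically be passed to
# BpmnWorkflowSerializer.configure_workflow_spec_converter([MyCustomTaskConverter]).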
|
|
@ -0,0 +1,54 @@
|
|||
from copy import deepcopy
|
||||
|
||||
def version_1_0_to_1_1(old):
|
||||
"""
|
||||
Upgrade v1.0 serializations to v1.1.
|
||||
|
||||
Starting with Spiff 1.1.8, subworkflows are no longer integrated into the main task tree. When
|
||||
a subworkflow (a subprocess, transaction, or call activity) is reached, a subprocess is
|
||||
added to the top level workflow and the task enters a waiting state until the workflow
|
||||
completes.
|
||||
|
||||
To make the serialization backwards compatible, we delete the tasks from the main workflow
|
||||
task list, add them to the appropriate subprocess, and recreate the remaining subprocess
|
||||
attributes based on the task states.
|
||||
"""
|
||||
new = deepcopy(old)
|
||||
subprocesses = dict((sp, { 'tasks': {}, 'root': None, 'data': {}, 'success': True }) for sp in new['subprocesses'])
|
||||
|
||||
# Move the tasks out of the top-level
|
||||
for sp, task_ids in new['subprocesses'].items():
|
||||
for task_id in task_ids:
|
||||
if task_id in new['tasks']:
|
||||
subprocesses[sp]['tasks'][task_id] = new['tasks'].pop(task_id)
|
||||
if subprocesses[sp]['root'] is None:
|
||||
subprocesses[sp]['root'] = task_id
|
||||
subprocesses[sp]['tasks'][task_id]['parent'] = None
|
||||
|
||||
# Fix up the task and workflow states
|
||||
waiting = []
|
||||
for sp in subprocesses:
|
||||
completed = sorted(
|
||||
[t for t in subprocesses[sp]['tasks'].values() if t['state'] in [32, 64] ],
|
||||
key=lambda t: t['last_state_change']
|
||||
)
|
||||
if len(completed) > 0:
|
||||
subprocesses[sp]['last_task'] = completed[-1]
|
||||
# If there are uncompleted tasks, set the subworkflow task state to waiting
|
||||
if len(completed) < len(subprocesses[sp]['tasks']):
|
||||
waiting.append(sp)
|
||||
|
||||
# Check the top level and all subprocesses for waiting tasks
|
||||
# Also remove any children that are no longer in the tree
|
||||
for sp in [new] + list(subprocesses.values()):
|
||||
for task_id, task in sp['tasks'].items():
|
||||
if task_id in waiting:
|
||||
task['state'] = 8
|
||||
task['children'] = [ c for c in task['children'] if c in sp['tasks'] ]
|
||||
|
||||
new['subprocesses'] = subprocesses
|
||||
return new
|
||||
|
||||
MIGRATIONS = {
|
||||
'1.0': version_1_0_to_1_1,
|
||||
}
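# A sketch of how this table is consulted; the real lookup lives in
# BpmnWorkflowSerializer.workflow_from_dict.
def upgrade(dct, version):
    return MIGRATIONS[version](dct) if version in MIGRATIONS else dct

# e.g. upgrade(old_dct, '1.0') runs version_1_0_to_1_1 on a v1.0 serialization.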
|
|
@ -0,0 +1,295 @@
|
|||
import json
|
||||
import gzip
|
||||
from copy import deepcopy
|
||||
from uuid import UUID
|
||||
|
||||
from .version_migration import MIGRATIONS
|
||||
|
||||
from .bpmn_converters import BpmnDataConverter
|
||||
|
||||
from ..workflow import BpmnMessage, BpmnWorkflow
|
||||
from ..specs.SubWorkflowTask import SubWorkflowTask
|
||||
from ...task import Task
|
||||
|
||||
from .workflow_spec_converter import BpmnProcessSpecConverter
|
||||
|
||||
from .task_spec_converters import SimpleTaskConverter, StartTaskConverter, EndJoinConverter, LoopResetTaskConverter
|
||||
from .task_spec_converters import NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter
|
||||
from .task_spec_converters import CallActivityTaskConverter, TransactionSubprocessTaskConverter
|
||||
from .task_spec_converters import StartEventConverter, EndEventConverter
|
||||
from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter
|
||||
from .task_spec_converters import SendTaskConverter, ReceiveTaskConverter
|
||||
from .task_spec_converters import BoundaryEventConverter, BoundaryEventParentConverter
|
||||
from .task_spec_converters import ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter
|
||||
|
||||
DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [
|
||||
SimpleTaskConverter, StartTaskConverter, EndJoinConverter, LoopResetTaskConverter,
|
||||
NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter,
|
||||
CallActivityTaskConverter, TransactionSubprocessTaskConverter,
|
||||
StartEventConverter, EndEventConverter, SendTaskConverter, ReceiveTaskConverter,
|
||||
IntermediateCatchEventConverter, IntermediateThrowEventConverter,
|
||||
BoundaryEventConverter, BoundaryEventParentConverter,
|
||||
ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter
|
||||
]
|
||||
|
||||
class BpmnWorkflowSerializer:
|
||||
"""
|
||||
This class implements a customizable BPMN Workflow serializer, based on a Workflow Spec Converter
|
||||
and a Data Converter.
|
||||
|
||||
The goal is to provide modular serialization capabilities.
|
||||
|
||||
You'll need to configure a Workflow Spec Converter with Task Spec Converters for any task types
|
||||
present in your workflows. Because the Task Spec Converters also require initialization, the process
|
||||
of building a Workflow Spec Converter is a little tedious; therefore, this class provides a static
|
||||
method `configure_workflow_spec_converter` that can extend and/or override the default Task Spec
|
||||
Converter list and return a Workflow Spec Converter that will recognize the overridden specs.
|
||||
|
||||
If you have implemented any custom task specs, you'll need to write a converter to handle them and
|
||||
provide it to this method; if you are using only the defaults, you can call this with no arguments.
|
||||
|
||||
If your workflow contains non-JSON-serializable objects, you'll need to extend or replace the
|
||||
default data converter with one that will handle them. This converter needs to implement
|
||||
`convert` and `restore` methods.
|
||||
|
||||
Serialization occurs in two phases: the first is to convert everything in the workflow to a
|
||||
dictionary containing only JSON-serializable objects, and the second is dumping to JSON.
|
||||
|
||||
This means that you can call the `workflow_to_dict` or `workflow_from_dict` methods separately from
|
||||
conversion to JSON for further manipulation of the state, or selective serialization of only certain
|
||||
parts of the workflow more conveniently. You can of course call methods from the Workflow Spec and
|
||||
Data Converters via the `spec_converter` and `data_converter` attributes as well to bypass the
|
||||
overhead of converting or restoring the entire thing.
|
||||
"""
|
||||
|
||||
# This is the default version set on the workflow; it can be overridden
|
||||
# using configure_workflow_spec_converter.
|
||||
VERSION = "1.0"
|
||||
VERSION_KEY = "serializer_version"
|
||||
DEFAULT_JSON_ENCODER_CLS = None
|
||||
DEFAULT_JSON_DECODER_CLS = None
|
||||
|
||||
@staticmethod
|
||||
def configure_workflow_spec_converter(task_spec_overrides=None, data_converter=None, version=VERSION):
|
||||
"""
|
||||
This method can be used to add additional task spec converters to the default BPMN Process
|
||||
converter.
|
||||
|
||||
The task specs may contain arbitrary data, though none of the default task specs use it. We
|
||||
may disallow that in the future, so we don't recommend using this capability.
|
||||
|
||||
The task spec converters also take an optional typename argument; this will be included in the
|
||||
serialized dictionaries so that the original class can be restored. The unqualified classname is
|
||||
used if none is provided. If a class in `task_spec_overrides` conflicts with one of the
|
||||
defaults, the default will be removed and the provided one will be used instead. If you need
|
||||
both for some reason, you'll have to instantiate the task spec converters and workflow spec
|
||||
converter yourself.
|
||||
|
||||
:param task_spec_overrides: a list of task spec converter classes
|
||||
:param data_converter: an optional data converter for task spec data
|
||||
"""
|
||||
if task_spec_overrides is None:
|
||||
task_spec_overrides = []
|
||||
|
||||
classnames = [c.__name__ for c in task_spec_overrides]
|
||||
converters = [c(data_converter=data_converter) for c in task_spec_overrides]
|
||||
for c in DEFAULT_TASK_SPEC_CONVERTER_CLASSES:
|
||||
if c.__name__ not in classnames:
|
||||
converters.append(c(data_converter=data_converter))
|
||||
return BpmnProcessSpecConverter(converters, version)
|
||||
|
||||
|
||||
def __init__(self, spec_converter=None, data_converter=None, wf_class=None, version=VERSION, json_encoder_cls=DEFAULT_JSON_ENCODER_CLS, json_decoder_cls=DEFAULT_JSON_DECODER_CLS):
|
||||
"""Intializes a Workflow Serializer with the given Workflow, Task and Data Converters.
|
||||
|
||||
:param spec_converter: the workflow spec converter
|
||||
:param data_converter: the data converter
|
||||
:param wf_class: the workflow class
|
||||
:param json_encoder_cls: JSON encoder class to be used for dumps/dump operations
|
||||
:param json_decoder_cls: JSON decoder class to be used for loads/load operations
|
||||
"""
|
||||
super().__init__()
|
||||
self.spec_converter = spec_converter if spec_converter is not None else self.configure_workflow_spec_converter()
|
||||
self.data_converter = data_converter if data_converter is not None else BpmnDataConverter()
|
||||
self.wf_class = wf_class if wf_class is not None else BpmnWorkflow
|
||||
self.json_encoder_cls = json_encoder_cls
|
||||
self.json_decoder_cls = json_decoder_cls
|
||||
self.VERSION = version
|
||||
|
||||
def serialize_json(self, workflow, use_gzip=False):
|
||||
"""Serialize the dictionary representation of the workflow to JSON.
|
||||
|
||||
:param workflow: the workflow to serialize
|
||||
|
||||
Returns:
|
||||
a JSON dump of the dictionary representation
|
||||
"""
|
||||
dct = self.workflow_to_dict(workflow)
|
||||
dct[self.VERSION_KEY] = self.VERSION
|
||||
json_str = json.dumps(dct, cls=self.json_encoder_cls)
|
||||
return gzip.compress(json_str.encode('utf-8')) if use_gzip else json_str
|
||||
|
||||
def __get_dict(self, serialization, use_gzip=False):
|
||||
if isinstance(serialization, dict):
|
||||
dct = serialization
|
||||
elif use_gzip:
|
||||
dct = json.loads(gzip.decompress(serialization), cls=self.json_decoder_cls)
|
||||
else:
|
||||
dct = json.loads(serialization, cls=self.json_decoder_cls)
|
||||
return dct
|
||||
|
||||
def deserialize_json(self, serialization, read_only=False, use_gzip=False):
|
||||
dct = self.__get_dict(serialization, use_gzip)
|
||||
return self.workflow_from_dict(dct, read_only)
|
||||
|
||||
def get_version(self, serialization, use_gzip=False):
|
||||
try:
|
||||
dct = self.__get_dict(serialization, use_gzip)
|
||||
if self.VERSION_KEY in dct:
|
||||
return dct[self.VERSION_KEY]
|
||||
except: # Don't bail out trying to get a version, just return None.
|
||||
return None
|
||||
|
||||
def workflow_to_dict(self, workflow):
|
||||
"""Return a JSON-serializable dictionary representation of the workflow.
|
||||
|
||||
:param workflow: the workflow
|
||||
|
||||
Returns:
|
||||
a dictionary representation of the workflow
|
||||
"""
|
||||
# These properties are applicable to top level & subprocesses
|
||||
dct = self.process_to_dict(workflow)
|
||||
# These are only used at the top-level
|
||||
dct['spec'] = self.spec_converter.convert(workflow.spec)
|
||||
dct['subprocess_specs'] = dict(
|
||||
(name, self.spec_converter.convert(spec)) for name, spec in workflow.subprocess_specs.items()
|
||||
)
|
||||
dct['subprocesses'] = dict(
|
||||
(str(task_id), self.process_to_dict(sp)) for task_id, sp in workflow.subprocesses.items()
|
||||
)
|
||||
dct['bpmn_messages'] = [self.message_to_dict(msg) for msg in workflow.bpmn_messages]
|
||||
return dct
|
||||
|
||||
def workflow_from_dict(self, dct, read_only=False):
|
||||
"""Create a workflow based on a dictionary representation.
|
||||
|
||||
:param dct: the dictionary representation
|
||||
:param read_only: optionally disable modifying the workflow
|
||||
|
||||
Returns:
|
||||
a BPMN Workflow object
|
||||
"""
|
||||
dct_copy = deepcopy(dct)
|
||||
|
||||
# Upgrade serialized version if necessary
|
||||
if self.VERSION_KEY in dct_copy:
|
||||
version = dct_copy.pop(self.VERSION_KEY)
|
||||
if version in MIGRATIONS:
|
||||
dct_copy = MIGRATIONS[version](dct_copy)
|
||||
|
||||
# Restore the top level spec and the subprocess specs
|
||||
spec = self.spec_converter.restore(dct_copy.pop('spec'))
|
||||
subprocess_specs = dct_copy.pop('subprocess_specs', {})
|
||||
for name, wf_dct in subprocess_specs.items():
|
||||
subprocess_specs[name] = self.spec_converter.restore(wf_dct)
|
||||
|
||||
# Create the top-level workflow
|
||||
workflow = self.wf_class(spec, subprocess_specs, read_only=read_only)
|
||||
|
||||
# Restore any unretrieved messages
|
||||
workflow.bpmn_messages = [ self.message_from_dict(msg) for msg in dct.get('bpmn_messages', []) ]
|
||||
|
||||
# Restore the remainder of the workflow
|
||||
workflow.data = self.data_converter.restore(dct_copy.pop('data'))
|
||||
workflow.success = dct_copy.pop('success')
|
||||
workflow.task_tree = self.task_tree_from_dict(dct_copy, dct_copy.pop('root'), None, workflow)
|
||||
|
||||
return workflow
|
||||
|
||||
def task_to_dict(self, task):
|
||||
return {
|
||||
'id': str(task.id),
|
||||
'parent': str(task.parent.id) if task.parent is not None else None,
|
||||
'children': [ str(child.id) for child in task.children ],
|
||||
'last_state_change': task.last_state_change,
|
||||
'state': task.state,
|
||||
'task_spec': task.task_spec.name,
|
||||
'triggered': task.triggered,
|
||||
'workflow_name': task.workflow.name,
|
||||
'internal_data': self.data_converter.convert(task.internal_data),
|
||||
'data': self.data_converter.convert(task.data),
|
||||
}
|
||||
|
||||
def task_from_dict(self, dct, workflow, task_spec, parent):
|
||||
|
||||
task = Task(workflow, task_spec, parent)
|
||||
task.id = UUID(dct['id'])
|
||||
task.state = dct['state']
|
||||
task.last_state_change = dct['last_state_change']
|
||||
task.triggered = dct['triggered']
|
||||
task.internal_data = self.data_converter.restore(dct['internal_data'])
|
||||
task.data = self.data_converter.restore(dct['data'])
|
||||
return task
|
||||
|
||||
def task_tree_to_dict(self, root):
|
||||
|
||||
tasks = { }
|
||||
def add_task(task):
|
||||
dct = self.task_to_dict(task)
|
||||
tasks[dct['id']] = dct
|
||||
for child in task.children:
|
||||
add_task(child)
|
||||
|
||||
add_task(root)
|
||||
return tasks
|
||||
|
||||
def task_tree_from_dict(self, process_dct, task_id, parent_task, process, top_level_workflow=None, top_level_dct=None):
|
||||
|
||||
top = top_level_workflow or process
|
||||
top_dct = top_level_dct or process_dct
|
||||
|
||||
task_dict = process_dct['tasks'][task_id]
|
||||
task_spec = process.spec.task_specs[task_dict['task_spec']]
|
||||
task = self.task_from_dict(task_dict, process, task_spec, parent_task)
|
||||
if task_id == process_dct['last_task']:
|
||||
process.last_task = task
|
||||
|
||||
if isinstance(task_spec, SubWorkflowTask) and task_id in top_dct.get('subprocesses', {}):
|
||||
subprocess_spec = top.subprocess_specs[task_spec.spec]
|
||||
subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process, read_only=top.read_only)
|
||||
subprocess_dct = top_dct['subprocesses'].get(task_id, {})
|
||||
subprocess.data = self.data_converter.restore(subprocess_dct.pop('data'))
|
||||
subprocess.success = subprocess_dct.pop('success')
|
||||
subprocess.task_tree = self.task_tree_from_dict(subprocess_dct, subprocess_dct.pop('root'), None, subprocess, top, top_dct)
|
||||
subprocess.completed_event.connect(task_spec._on_subworkflow_completed, task)
|
||||
top_level_workflow.subprocesses[task.id] = subprocess
|
||||
|
||||
for child in [ process_dct['tasks'][c] for c in task_dict['children'] ]:
|
||||
self.task_tree_from_dict(process_dct, child['id'], task, process, top, top_dct)
|
||||
|
||||
return task
|
||||
|
||||
def process_to_dict(self, process):
|
||||
return {
|
||||
'data': self.data_converter.convert(process.data),
|
||||
'last_task': str(process.last_task.id) if process.last_task is not None else None,
|
||||
'success': process.success,
|
||||
'tasks': self.task_tree_to_dict(process.task_tree),
|
||||
'root': str(process.task_tree.id),
|
||||
}
|
||||
|
||||
def message_to_dict(self, message):
|
||||
dct = {
|
||||
'correlations': dict([ (k, self.data_converter.convert(v)) for k, v in message.correlations.items() ]),
|
||||
'name': message.name,
|
||||
'payload': self.spec_converter.convert(message.payload),
|
||||
}
|
||||
return dct
|
||||
|
||||
def message_from_dict(self, dct):
|
||||
return BpmnMessage(
|
||||
dict([ (k, self.data_converter.restore(v)) for k, v in dct['correlations'].items() ]),
|
||||
dct['name'],
|
||||
self.spec_converter.restore(dct['payload'])
|
||||
)
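# A round-trip sketch using the defaults; `workflow` is assumed to be an
# existing BpmnWorkflow instance (e.g. produced by the BPMN parser).
spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter()
serializer = BpmnWorkflowSerializer(spec_converter)

json_str = serializer.serialize_json(workflow)          # workflow_to_dict + json.dumps
assert serializer.get_version(json_str) == serializer.VERSION
workflow_copy = serializer.deserialize_json(json_str)   # rebuilds the BpmnWorkflow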
|
|
@ -0,0 +1,198 @@
|
|||
from .bpmn_converters import BpmnWorkflowSpecConverter
|
||||
|
||||
from ..specs.BpmnProcessSpec import BpmnProcessSpec
|
||||
from ..specs.MultiInstanceTask import MultiInstanceTask, getDynamicMIClass
|
||||
from ..specs.BpmnSpecMixin import BpmnSpecMixin
|
||||
from ..specs.events.IntermediateEvent import _BoundaryEventParent
|
||||
|
||||
from ...operators import Attrib, PathAttrib
|
||||
from ...specs.WorkflowSpec import WorkflowSpec
|
||||
|
||||
|
||||
class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
|
||||
|
||||
def __init__(self, task_spec_converters, data_converter=None):
|
||||
super().__init__(BpmnProcessSpec, task_spec_converters, data_converter)
|
||||
self.register(WorkflowSpec, self.base_workflow_spec_to_dict, self.from_dict)
|
||||
|
||||
def multi_instance_to_dict(self, spec):
|
||||
|
||||
# This is a hot mess, but I don't know how else to deal with the dynamically
|
||||
# generated classes. Why do we use them?
|
||||
classname = spec.prevtaskclass.split('.')[-1]
|
||||
# Bypass the automatic selection of a conversion function
|
||||
# This returns the partial function that was created on register for the original task type.
|
||||
# The second argument is the function that would be called by `convert`.
|
||||
conversion = self.convert_to_dict[classname]
|
||||
func = conversion.args[1]
|
||||
# We can just call it directly and add the typename manually
|
||||
dct = func(spec)
|
||||
dct['typename'] = classname
|
||||
# And we have to do this here, rather than in a converter
|
||||
# We also have to manually apply the Attrib conversions
|
||||
convert_attrib = lambda v: { 'name': v.name, 'typename': v.__class__.__name__ }
|
||||
dct.update({
|
||||
'times': convert_attrib(spec.times) if spec.times is not None else None,
|
||||
'elementVar': spec.elementVar,
|
||||
'collection': convert_attrib(spec.collection) if spec.collection is not None else None,
|
||||
# These are not defined in the constructor, but added by the parser, or somewhere else inappropriate
|
||||
'completioncondition': spec.completioncondition,
|
||||
'prevtaskclass': spec.prevtaskclass,
|
||||
'isSequential': spec.isSequential,
|
||||
})
|
||||
# Also from the parser, but not always present.
|
||||
if hasattr(spec, 'expanded'):
|
||||
dct['expanded'] = spec.expanded
|
||||
return dct
|
||||
|
||||
def multiinstance_from_dict(self, dct):
|
||||
|
||||
# The restore function removes items from the dictionary.
|
||||
# We need the original so that we can restore everything without enumerating all
|
||||
# possibilities in this function.
|
||||
attrs = list(dct.keys())
|
||||
attrs.remove('typename')
|
||||
attrs.remove('wf_spec')
|
||||
# These need to be restored here
|
||||
attrs.remove('times')
|
||||
attrs.remove('collection')
|
||||
# If only I'd done this right in the DMN converter I wouldn't have to pollute this one with
|
||||
# task-specific cases.
|
||||
if 'decision_table' in attrs:
|
||||
attrs.remove('decision_table')
|
||||
attrs.append('dmnEngine')
|
||||
|
||||
# Terrible ugly hack
|
||||
registered = dict((name, c) for c, name in self.typenames.items())
|
||||
# First get the dynamic class
|
||||
cls = getDynamicMIClass(dct['name'], registered[dct['typename']])
|
||||
# Restore the task according to the original task spec, so that its attributes can be converted
|
||||
# recursively
|
||||
original = self.restore(dct.copy())
|
||||
# But this task has the wrong class, so delete it from the spec
|
||||
del dct['wf_spec'].task_specs[original.name]
|
||||
|
||||
# Create a new class using the dynamic class
|
||||
task_spec = cls(**dct)
|
||||
# Restore the attributes that weren't recognized by the original converter
|
||||
restore_attrib = lambda v: Attrib(v['name']) if v['typename'] == 'Attrib' else PathAttrib(v['name'])
|
||||
task_spec.times = restore_attrib(dct['times']) if dct['times'] is not None else None
|
||||
task_spec.collection = restore_attrib(dct['collection']) if dct['collection'] is not None else None
|
||||
# Now copy everything else, from the temporary task spec if possible, otherwise the dict
|
||||
for attr in attrs:
|
||||
# If the original task has the attr, use the converted value
|
||||
if hasattr(original, attr):
|
||||
task_spec.__dict__[attr] = original.__dict__[attr]
|
||||
else:
|
||||
task_spec.__dict__[attr] = self.restore(dct[attr])
|
||||
|
||||
# Handle adding any remaining attributes from the original task type that might not be
|
||||
# present in the restored version (for example attributes added since last serialized)
|
||||
for attr in original.__dict__:
|
||||
if not hasattr(task_spec, attr):
|
||||
task_spec.__dict__[attr] = original.__dict__[attr]
|
||||
|
||||
return task_spec
|
||||
|
||||
def convert_task_spec_extensions(self, task_spec, dct):
|
||||
# Extensions will be moved out of the base parser, but since we currently add them to some
|
||||
# indeterminate set of tasks, we'll just check all the tasks for them here.
|
||||
if hasattr(task_spec, 'extensions'):
|
||||
dct.update({'extensions': task_spec.extensions})
|
||||
|
||||
def restore_task_spec_extensions(self, dct, task_spec):
|
||||
if 'extensions' in dct:
|
||||
task_spec.extensions = dct.pop('extensions')
|
||||
|
||||
def to_dict(self, spec):
|
||||
|
||||
dct = {
|
||||
'name': spec.name,
|
||||
'description': spec.description,
|
||||
'file': spec.file,
|
||||
'task_specs': {},
|
||||
'data_inputs': [ self.convert(obj) for obj in spec.data_inputs ],
|
||||
'data_outputs': [ self.convert(obj) for obj in spec.data_outputs ],
|
||||
'data_objects': dict([ (name, self.convert(obj)) for name, obj in spec.data_objects.items() ]),
|
||||
'correlation_keys': spec.correlation_keys,
|
||||
}
|
||||
for name, task_spec in spec.task_specs.items():
|
||||
if isinstance(task_spec, MultiInstanceTask):
|
||||
task_dict = self.multi_instance_to_dict(task_spec)
|
||||
else:
|
||||
task_dict = self.convert(task_spec)
|
||||
self.convert_task_spec_extensions(task_spec, task_dict)
|
||||
dct['task_specs'][name] = task_dict
|
||||
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
|
||||
spec = self.spec_class(name=dct['name'], description=dct['description'], filename=dct['file'])
|
||||
# There is a nostart arg in the base workflow spec class that prevents start task creation, but
|
||||
# the BPMN process spec doesn't pass it in, so we have to delete the auto-generated Start task.
|
||||
del spec.task_specs['Start']
|
||||
spec.start = None
|
||||
|
||||
# These are also automatically created with a workflow and should be replaced
|
||||
del spec.task_specs['End']
|
||||
del spec.task_specs[f'{spec.name}.EndJoin']
|
||||
|
||||
# Add the data specs
|
||||
spec.data_inputs = [ self.restore(obj_dct) for obj_dct in dct.pop('data_inputs', []) ]
|
||||
spec.data_outputs = [ self.restore(obj_dct) for obj_dct in dct.pop('data_outputs', []) ]
|
||||
spec.data_objects = dict([ (name, self.restore(obj_dct)) for name, obj_dct in dct.pop('data_objects', {}).items() ])
|
||||
|
||||
# Add messaging related stuff
|
||||
spec.correlation_keys = dct.pop('correlation_keys', {})
|
||||
|
||||
for name, task_dict in dct['task_specs'].items():
|
||||
# I hate this, but I need to pass in the workflow spec when I create the task.
|
||||
# IMO storing the workflow spec on the task spec is a TERRIBLE idea, but that's
|
||||
# how this thing works.
|
||||
task_dict['wf_spec'] = spec
|
||||
# Ugh.
|
||||
if 'prevtaskclass' in task_dict:
|
||||
task_spec = self.multiinstance_from_dict(task_dict)
|
||||
else:
|
||||
task_spec = self.restore(task_dict)
|
||||
if name == 'Start':
|
||||
spec.start = task_spec
|
||||
self.restore_task_spec_extensions(task_dict, task_spec)
|
||||
|
||||
# Now we have to go back and fix all the circular references to everything
|
||||
for task_spec in spec.task_specs.values():
|
||||
if isinstance(task_spec, BpmnSpecMixin):
|
||||
for flow in task_spec.outgoing_sequence_flows.values():
|
||||
flow.target_task_spec = spec.get_task_spec_from_name(flow.target_task_spec)
|
||||
for flow in task_spec.outgoing_sequence_flows_by_id.values():
|
||||
flow.target_task_spec = spec.get_task_spec_from_name(flow.target_task_spec)
|
||||
if isinstance(task_spec, _BoundaryEventParent):
|
||||
task_spec.main_child_task_spec = spec.get_task_spec_from_name(task_spec.main_child_task_spec)
|
||||
task_spec.inputs = [ spec.get_task_spec_from_name(name) for name in task_spec.inputs ]
|
||||
task_spec.outputs = [ spec.get_task_spec_from_name(name) for name in task_spec.outputs ]
|
||||
|
||||
return spec
|
||||
|
||||
def base_workflow_spec_to_dict(self, spec):
|
||||
|
||||
# We should delete this method when we stop supporting the old serializer.
|
||||
# It uses WorkflowSpec rather than BpmnWorkflowSpec, which does not support data objects.
|
||||
# I hate copying this code here, but I am NOT putting an "if isinstance" check in the
|
||||
# main method to handle a bug in the thing I'm replacing.
|
||||
|
||||
dct = {
|
||||
'name': spec.name,
|
||||
'description': spec.description,
|
||||
'file': spec.file,
|
||||
'task_specs': {},
|
||||
}
|
||||
for name, task_spec in spec.task_specs.items():
|
||||
if isinstance(task_spec, MultiInstanceTask):
|
||||
task_dict = self.multi_instance_to_dict(task_spec)
|
||||
else:
|
||||
task_dict = self.convert(task_spec)
|
||||
self.convert_task_spec_extensions(task_spec, task_dict)
|
||||
dct['task_specs'][name] = task_dict
|
||||
|
||||
return dct
|
|
@ -0,0 +1,185 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
|
||||
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
|
||||
from ...task import TaskState
|
||||
from .UnstructuredJoin import UnstructuredJoin
|
||||
from ...specs.Simple import Simple
|
||||
from ...specs.WorkflowSpec import WorkflowSpec
|
||||
|
||||
|
||||
data_log = logging.getLogger('spiff.data')
|
||||
|
||||
class _EndJoin(UnstructuredJoin):
|
||||
|
||||
def _check_threshold_unstructured(self, my_task, force=False):
|
||||
# Look at the tree to find all ready and waiting tasks (excluding
|
||||
# ourself). The EndJoin waits for everyone!
|
||||
waiting_tasks = []
|
||||
for task in my_task.workflow.get_tasks(TaskState.READY | TaskState.WAITING):
|
||||
if task.thread_id != my_task.thread_id:
|
||||
continue
|
||||
if task.task_spec == my_task.task_spec:
|
||||
continue
|
||||
|
||||
is_mine = False
|
||||
w = task.workflow
|
||||
if w == my_task.workflow:
|
||||
is_mine = True
|
||||
while w and w.outer_workflow != w:
|
||||
w = w.outer_workflow
|
||||
if w == my_task.workflow:
|
||||
is_mine = True
|
||||
if is_mine:
|
||||
waiting_tasks.append(task)
|
||||
|
||||
return force or len(waiting_tasks) == 0, waiting_tasks
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
super(_EndJoin, self)._on_complete_hook(my_task)
|
||||
my_task.workflow.data.update(my_task.data)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_join(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_join(wf_spec, s_state, _EndJoin)
|
||||
|
||||
|
||||
class BpmnDataSpecification:
|
||||
|
||||
def __init__(self, name, description=None):
|
||||
"""
|
||||
:param name: the name of the task (the BPMN ID)
|
||||
:param description: the task description (the BPMN name)
|
||||
"""
|
||||
self.name = name
|
||||
self.description = description or name
|
||||
# In the future, we can add schemas defining the objects here.
|
||||
|
||||
def get(self, my_task):
|
||||
"""Copy a value form the workflow data to the task data."""
|
||||
if self.name not in my_task.workflow.data:
|
||||
message = f"Workflow variable {self.name} not found"
|
||||
raise WorkflowDataException(my_task, data_input=self, message=message)
|
||||
my_task.data[self.name] = deepcopy(my_task.workflow.data[self.name])
|
||||
|
||||
def set(self, my_task):
|
||||
"""Copy a value from the task data to the workflow data"""
|
||||
if self.name not in my_task.data:
|
||||
message = f"Task variable {self.name} not found"
|
||||
raise WorkflowDataException(my_task, data_output=self, message=message)
|
||||
my_task.workflow.data[self.name] = deepcopy(my_task.data[self.name])
|
||||
del my_task.data[self.name]
|
||||
data_log.info(f'Set workflow variable {self.name}', extra=my_task.log_info())
|
||||
|
||||
def copy(self, source, destination, data_input=False, data_output=False):
|
||||
"""Copy a value from one task to another."""
|
||||
if self.name not in source.data:
|
||||
message = f"Unable to copy {self.name}"
|
||||
raise WorkflowDataException(
|
||||
source,
|
||||
data_input=self if data_input else None,
|
||||
data_output=self if data_output else None,
|
||||
message=message
|
||||
)
|
||||
destination.data[self.name] = deepcopy(source.data[self.name])
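# A sketch of how a data spec shuttles a value between workflow and task data,
# mirroring get/set above; `my_task` is assumed to be a Task whose workflow
# data already contains 'invoice_id'.
data_spec = BpmnDataSpecification('invoice_id')
data_spec.get(my_task)   # copies workflow.data['invoice_id'] into my_task.data
data_spec.set(my_task)   # copies it back to workflow.data and removes it from the task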
|
||||
|
||||
|
||||
class BpmnProcessSpec(WorkflowSpec):
|
||||
"""
|
||||
This class represents the specification of a BPMN process workflow. This
|
||||
specialises the standard Spiff WorkflowSpec class with a few extra methods
|
||||
and attributes.
|
||||
"""
|
||||
|
||||
def __init__(self, name=None, description=None, filename=None, svg=None):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param svg: This provides the SVG representation of the workflow as an
|
||||
LXML node. (optional)
|
||||
"""
|
||||
super(BpmnProcessSpec, self).__init__(name=name, filename=filename)
|
||||
self.end = _EndJoin(self, '%s.EndJoin' % (self.name))
|
||||
end = Simple(self, 'End')
|
||||
end.follow(self.end)
|
||||
self.svg = svg
|
||||
self.description = description
|
||||
self.data_inputs = []
|
||||
self.data_outputs = []
|
||||
self.data_objects = {}
|
||||
self.correlation_keys = {}
|
||||
|
||||
def get_all_lanes(self):
|
||||
"""
|
||||
Returns a set of the distinct lane names used in the process (including
|
||||
called activities)
|
||||
"""
|
||||
|
||||
done = set()
|
||||
lanes = set()
|
||||
|
||||
def recursive_find(task_spec):
|
||||
if task_spec in done:
|
||||
return
|
||||
|
||||
done.add(task_spec)
|
||||
|
||||
if hasattr(task_spec, 'lane') and task_spec.lane:
|
||||
lanes.add(task_spec.lane)
|
||||
|
||||
if hasattr(task_spec, 'spec'):
|
||||
recursive_find(task_spec.spec.start)
|
||||
|
||||
for t in task_spec.outputs:
|
||||
recursive_find(t)
|
||||
|
||||
recursive_find(self.start)
|
||||
|
||||
return lanes
|
||||
|
||||
def get_specs_depth_first(self):
|
||||
"""
|
||||
Get the specs for all processes (including called ones), in depth first
|
||||
order.
|
||||
"""
|
||||
|
||||
done = set()
|
||||
specs = [self]
|
||||
|
||||
def recursive_find(task_spec):
|
||||
if task_spec in done:
|
||||
return
|
||||
|
||||
done.add(task_spec)
|
||||
|
||||
if hasattr(task_spec, 'spec'):
|
||||
specs.append(task_spec.spec)
|
||||
recursive_find(task_spec.spec.start)
|
||||
|
||||
for t in task_spec.outputs:
|
||||
recursive_find(t)
|
||||
|
||||
recursive_find(self.start)
|
||||
|
||||
return specs
|
|
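A rough usage sketch of the data object handling above (illustrative only and not part of the original file; the stand-in task and workflow objects below are assumptions, not SpiffWorkflow API):

from types import SimpleNamespace

spec = BpmnDataSpecification('order_id')

workflow = SimpleNamespace(data={'order_id': 42})
task = SimpleNamespace(workflow=workflow, data={}, log_info=lambda: {})

spec.get(task)   # copies workflow.data['order_id'] into task.data
assert task.data['order_id'] == 42

task.data['order_id'] = 43
spec.set(task)   # copies the value back and removes it from task.data
assert workflow.data['order_id'] == 43 and 'order_id' not in task.data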
@ -0,0 +1,230 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from ...task import TaskState
|
||||
from ...operators import Operator
|
||||
from ...specs import TaskSpec
|
||||
|
||||
|
||||
class _BpmnCondition(Operator):
|
||||
|
||||
def __init__(self, *args):
|
||||
if len(args) > 1:
|
||||
raise TypeError("Too many arguments")
|
||||
super(_BpmnCondition, self).__init__(*args)
|
||||
|
||||
def _matches(self, task):
|
||||
return task.workflow.script_engine.evaluate(task, self.args[0])
|
||||
|
||||
|
||||
class SequenceFlow(object):
|
||||
|
||||
"""
|
||||
Keeps information relating to a sequence flow
|
||||
"""
|
||||
|
||||
def __init__(self, id, name, documentation, target_task_spec):
|
||||
"""
|
||||
Constructor.
|
||||
"""
|
||||
self.id = id
|
||||
self.name = name.strip() if name else name
|
||||
self.documentation = documentation
|
||||
self.target_task_spec = target_task_spec
|
||||
|
||||
def serialize(self):
|
||||
return {'id':self.id,
|
||||
'name':self.name,
|
||||
'documentation':self.documentation,
|
||||
'target_task_spec':self.target_task_spec.id}
|
||||
|
||||
|
||||
class BpmnSpecMixin(TaskSpec):
|
||||
"""
|
||||
All BPMN spec classes should mix this superclass in. It adds a number of
|
||||
methods that are BPMN specific to the TaskSpec.
|
||||
"""
|
||||
|
||||
def __init__(self, wf_spec, name, lane=None, position=None, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param lane: Indicates the name of the lane that this task belongs to
|
||||
(optional).
|
||||
"""
|
||||
super(BpmnSpecMixin, self).__init__(wf_spec, name, **kwargs)
|
||||
self.outgoing_sequence_flows = {}
|
||||
self.outgoing_sequence_flows_by_id = {}
|
||||
self.lane = lane
|
||||
self.position = position or {'x': 0, 'y': 0}
|
||||
self.loopTask = False
|
||||
self.documentation = None
|
||||
self.data_input_associations = []
|
||||
self.data_output_associations = []
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'BPMN Task'
|
||||
|
||||
def is_loop_task(self):
|
||||
"""
|
||||
Returns true if this task is a BPMN looping task
|
||||
"""
|
||||
return self.loopTask
|
||||
|
||||
def connect_outgoing(self, taskspec, sequence_flow_id, sequence_flow_name,
|
||||
documentation):
|
||||
"""
|
||||
Connect this task spec to the indicated child.
|
||||
|
||||
:param sequence_flow_id: The ID of the connecting sequenceFlow node.
|
||||
|
||||
:param sequence_flow_name: The name of the connecting sequenceFlow
|
||||
node.
|
||||
"""
|
||||
self.connect(taskspec)
|
||||
s = SequenceFlow(
|
||||
sequence_flow_id, sequence_flow_name, documentation, taskspec)
|
||||
self.outgoing_sequence_flows[taskspec.name] = s
|
||||
self.outgoing_sequence_flows_by_id[sequence_flow_id] = s
|
||||
|
||||
def connect_outgoing_if(self, condition, taskspec, sequence_flow_id,
|
||||
sequence_flow_name, documentation):
|
||||
"""
|
||||
Connect this task spec to the indicated child, if the condition
|
||||
evaluates to true. This should only be called if the task has a
|
||||
connect_if method (e.g. ExclusiveGateway).
|
||||
|
||||
:param sequence_flow_id: The ID of the connecting sequenceFlow node.
|
||||
|
||||
:param sequence_flow_name: The name of the connecting sequenceFlow
|
||||
node.
|
||||
"""
|
||||
self.connect_if(_BpmnCondition(condition), taskspec)
|
||||
s = SequenceFlow(
|
||||
sequence_flow_id, sequence_flow_name, documentation, taskspec)
|
||||
self.outgoing_sequence_flows[taskspec.name] = s
|
||||
self.outgoing_sequence_flows_by_id[sequence_flow_id] = s
|
||||
|
||||
def get_outgoing_sequence_flow_by_spec(self, task_spec):
|
||||
"""
|
||||
Returns the outgoing SequenceFlow targeting the specified task_spec.
|
||||
"""
|
||||
return self.outgoing_sequence_flows[task_spec.name]
|
||||
|
||||
def get_outgoing_sequence_flow_by_id(self, id):
|
||||
"""
|
||||
Returns the outgoing SequenceFlow with the specified ID.
|
||||
"""
|
||||
return self.outgoing_sequence_flows_by_id[id]
|
||||
|
||||
def has_outgoing_sequence_flow(self, id):
|
||||
"""
|
||||
Returns true if the SequenceFlow with the specified ID is leaving this
|
||||
task.
|
||||
"""
|
||||
return id in self.outgoing_sequence_flows_by_id
|
||||
|
||||
def get_outgoing_sequence_names(self):
|
||||
"""
|
||||
Returns a list of the names of outgoing sequences. Some may be None.
|
||||
"""
|
||||
return sorted([s.name for s in
|
||||
list(self.outgoing_sequence_flows_by_id.values())])
|
||||
|
||||
def get_outgoing_sequences(self):
|
||||
"""
|
||||
Returns a list of outgoing sequences. Some may be None.
|
||||
"""
|
||||
return iter(list(self.outgoing_sequence_flows_by_id.values()))
|
||||
|
||||
# Hooks for Custom BPMN tasks ##########
|
||||
|
||||
def entering_waiting_state(self, my_task):
|
||||
"""
|
||||
Called when a task enters the WAITING state.
|
||||
|
||||
A subclass may override this method to do work when this happens.
|
||||
"""
|
||||
pass
|
||||
|
||||
def entering_ready_state(self, my_task):
|
||||
"""
|
||||
Called when a task enters the READY state.
|
||||
|
||||
A subclass may override this method to do work when this happens.
|
||||
"""
|
||||
pass
|
||||
|
||||
def entering_complete_state(self, my_task):
|
||||
"""
|
||||
Called when a task enters the COMPLETE state.
|
||||
|
||||
A subclass may override this method to do work when this happens.
|
||||
"""
|
||||
pass
|
||||
|
||||
def entering_cancelled_state(self, my_task):
|
||||
"""
|
||||
Called when a task enters the CANCELLED state.
|
||||
|
||||
A subclass may override this method to do work when this happens.
|
||||
"""
|
||||
pass
|
||||
|
||||
def _on_ready_hook(self, my_task):
|
||||
super()._on_ready_hook(my_task)
|
||||
for obj in self.data_input_associations:
|
||||
obj.get(my_task)
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
|
||||
for obj in self.data_output_associations:
|
||||
obj.set(my_task)
|
||||
|
||||
for obj in self.data_input_associations:
|
||||
            # Remove any copied input variables that have not already been removed
|
||||
my_task.data.pop(obj.name)
|
||||
|
||||
super(BpmnSpecMixin, self)._on_complete_hook(my_task)
|
||||
if isinstance(my_task.parent.task_spec, BpmnSpecMixin):
|
||||
my_task.parent.task_spec._child_complete_hook(my_task)
|
||||
if not my_task.workflow._is_busy_with_restore():
|
||||
self.entering_complete_state(my_task)
|
||||
|
||||
def _child_complete_hook(self, child_task):
|
||||
pass
|
||||
|
||||
def _on_cancel(self, my_task):
|
||||
super(BpmnSpecMixin, self)._on_cancel(my_task)
|
||||
my_task.workflow._task_cancelled_notify(my_task)
|
||||
if not my_task.workflow._is_busy_with_restore():
|
||||
self.entering_cancelled_state(my_task)
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
prev_state = my_task.state
|
||||
super(BpmnSpecMixin, self)._update_hook(my_task)
|
||||
if (prev_state != TaskState.WAITING and my_task.state == TaskState.WAITING and
|
||||
not my_task.workflow._is_busy_with_restore()):
|
||||
self.entering_waiting_state(my_task)
|
||||
|
||||
def _on_ready_before_hook(self, my_task):
|
||||
super(BpmnSpecMixin, self)._on_ready_before_hook(my_task)
|
||||
if not my_task.workflow._is_busy_with_restore():
|
||||
self.entering_ready_state(my_task)
|
|
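A rough sketch of how the sequence-flow plumbing above is used when building a spec by hand (illustrative only; the module paths, flow id and names are assumptions based on this commit's layout, not verified documentation):

from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec
from SpiffWorkflow.bpmn.specs.UserTask import UserTask

spec = BpmnProcessSpec('example_process')
review = UserTask(spec, 'Review')
approve = UserTask(spec, 'Approve')

spec.start.connect(review)
review.connect_outgoing(approve, 'Flow_1', 'to approval', None)

assert review.has_outgoing_sequence_flow('Flow_1')
assert review.get_outgoing_sequence_names() == ['to approval']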
@ -0,0 +1,62 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
from ...exceptions import WorkflowException
|
||||
|
||||
from .BpmnSpecMixin import BpmnSpecMixin
|
||||
from ...specs import TaskSpec
|
||||
from ...specs.ExclusiveChoice import ExclusiveChoice
|
||||
|
||||
|
||||
class ExclusiveGateway(ExclusiveChoice, BpmnSpecMixin):
|
||||
|
||||
"""
|
||||
Task Spec for a bpmn:exclusiveGateway node.
|
||||
"""
|
||||
|
||||
def test(self):
|
||||
"""
|
||||
Checks whether all required attributes are set. Throws an exception
|
||||
if an error was detected.
|
||||
"""
|
||||
# This has been overridden to allow a single default flow out (without a
|
||||
# condition) - useful for the converging type
|
||||
TaskSpec.test(self)
|
||||
# if len(self.cond_task_specs) < 1:
|
||||
# raise WorkflowException(self, 'At least one output required.')
|
||||
for condition, name in self.cond_task_specs:
|
||||
if name is None:
|
||||
raise WorkflowException(self, 'Condition with no task spec.')
|
||||
task_spec = self._wf_spec.get_task_spec_from_name(name)
|
||||
if task_spec is None:
|
||||
msg = 'Condition leads to non-existent task ' + repr(name)
|
||||
raise WorkflowException(self, msg)
|
||||
if condition is None:
|
||||
continue
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Exclusive Gateway'
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_exclusive_gateway(self)
|
||||
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_exclusive_gateway(wf_spec, s_state)
|
|
@ -0,0 +1,123 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
from collections import deque
|
||||
|
||||
from ...task import TaskState
|
||||
from .UnstructuredJoin import UnstructuredJoin
|
||||
|
||||
|
||||
class InclusiveGateway(UnstructuredJoin):
|
||||
"""
|
||||
    Task Spec for a bpmn:inclusiveGateway node. From the specification of BPMN
|
||||
(http://www.omg.org/spec/BPMN/2.0/PDF - document number:formal/2011-01-03):
|
||||
|
||||
The Inclusive Gateway is activated if
|
||||
* At least one incoming Sequence Flow has at least one token and
|
||||
* For every directed path formed by sequence flow that
|
||||
* starts with a Sequence Flow f of the diagram that has a token,
|
||||
* ends with an incoming Sequence Flow of the inclusive gateway that has
|
||||
no token, and
|
||||
* does not visit the Inclusive Gateway.
|
||||
* There is also a directed path formed by Sequence Flow that
|
||||
* starts with f,
|
||||
* ends with an incoming Sequence Flow of the inclusive gateway that has
|
||||
a token, and
|
||||
* does not visit the Inclusive Gateway.
|
||||
|
||||
Upon execution, a token is consumed from each incoming Sequence Flow that
|
||||
has a token. A token will be produced on some of the outgoing Sequence
|
||||
Flows.
|
||||
|
||||
TODO: Not implemented: At the moment, we can't handle having more than one
|
||||
token at a single incoming sequence
|
||||
TODO: At the moment only converging Inclusive Gateways are supported.
|
||||
|
||||
In order to determine the outgoing Sequence Flows that receive a token, all
|
||||
conditions on the outgoing Sequence Flows are evaluated. The evaluation
|
||||
does not have to respect a certain order.
|
||||
|
||||
For every condition which evaluates to true, a token MUST be passed on the
|
||||
respective Sequence Flow.
|
||||
|
||||
If and only if none of the conditions evaluates to true, the token is
|
||||
passed on the default Sequence Flow.
|
||||
|
||||
In case all conditions evaluate to false and a default flow has not been
|
||||
specified, the Inclusive Gateway throws an exception.
|
||||
"""
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Inclusive Gateway'
|
||||
|
||||
def _check_threshold_unstructured(self, my_task, force=False):
|
||||
|
||||
# Look at the tree to find all ready and waiting tasks (excluding ones
|
||||
# that are our completed inputs).
|
||||
tasks = []
|
||||
for task in my_task.workflow.get_tasks(TaskState.READY | TaskState.WAITING):
|
||||
if task.thread_id != my_task.thread_id:
|
||||
continue
|
||||
if task.workflow != my_task.workflow:
|
||||
continue
|
||||
if task.task_spec == my_task.task_spec:
|
||||
continue
|
||||
tasks.append(task)
|
||||
|
||||
inputs_with_tokens, waiting_tasks = self._get_inputs_with_tokens(
|
||||
my_task)
|
||||
inputs_without_tokens = [
|
||||
i for i in self.inputs if i not in inputs_with_tokens]
|
||||
|
||||
waiting_tasks = []
|
||||
for task in tasks:
|
||||
if (self._has_directed_path_to(
|
||||
task, self,
|
||||
without_using_sequence_flow_from=inputs_with_tokens) and
|
||||
not self._has_directed_path_to(
|
||||
task, self,
|
||||
without_using_sequence_flow_from=inputs_without_tokens)):
|
||||
waiting_tasks.append(task)
|
||||
|
||||
return force or len(waiting_tasks) == 0, waiting_tasks
|
||||
|
||||
def _has_directed_path_to(self, task, task_spec,
|
||||
without_using_sequence_flow_from=None):
|
||||
q = deque()
|
||||
done = set()
|
||||
|
||||
without_using_sequence_flow_from = set(
|
||||
without_using_sequence_flow_from or [])
|
||||
|
||||
q.append(task.task_spec)
|
||||
while q:
|
||||
n = q.popleft()
|
||||
if n == task_spec:
|
||||
return True
|
||||
for child in n.outputs:
|
||||
if child not in done and not (
|
||||
n in without_using_sequence_flow_from and
|
||||
child == task_spec):
|
||||
done.add(child)
|
||||
q.append(child)
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic(wf_spec, s_state, InclusiveGateway)
|
|
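The activation rule in the docstring above reduces to a reachability question over the task-spec graph: the gateway keeps waiting only while a token can still arrive along some path that does not run through an input that already holds one. A toy illustration of that check over a plain adjacency dict (a standalone sketch, not the class's own method, which works on real task specs):

from collections import deque

def reachable(graph, start, target, blocked_edges=frozenset()):
    """BFS over a {node: [successors]} dict, skipping edges in blocked_edges."""
    queue, seen = deque([start]), {start}
    while queue:
        node = queue.popleft()
        if node == target:
            return True
        for succ in graph.get(node, []):
            if (node, succ) not in blocked_edges and succ not in seen:
                seen.add(succ)
                queue.append(succ)
    return False

# A token sitting at A can still reach the gateway GW via B, so GW must wait;
# once the only remaining routes run through inputs that already hold tokens,
# it fires instead.
graph = {'A': ['B'], 'B': ['GW'], 'C': ['GW']}
assert reachable(graph, 'A', 'GW')
assert not reachable(graph, 'A', 'GW', blocked_edges={('B', 'GW')})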
@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
|
||||
|
||||
from ...specs import Simple
|
||||
|
||||
|
||||
class ManualTask(Simple, BpmnSpecMixin):
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic(wf_spec, s_state, ManualTask)
|
||||
|
||||
def is_engine_task(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Manual Task'
|
|
@ -0,0 +1,524 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2020 Sartography
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import copy
|
||||
from builtins import range
|
||||
from uuid import uuid4
|
||||
import re
|
||||
|
||||
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
|
||||
from .SubWorkflowTask import SubWorkflowTask, CallActivity
|
||||
from .ParallelGateway import ParallelGateway
|
||||
from .ScriptTask import ScriptTask
|
||||
from .ExclusiveGateway import ExclusiveGateway
|
||||
from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
from ...operators import valueof, is_number
|
||||
from ...specs import SubWorkflow
|
||||
from ...specs.base import TaskSpec
|
||||
from ...util.impl import get_class
|
||||
from ...task import Task, TaskState
|
||||
from ...util.deep_merge import DeepMerge
|
||||
|
||||
|
||||
def gendict(path, d):
|
||||
if len(path) == 0:
|
||||
return d
|
||||
else:
|
||||
return gendict(path[:-1], {path[-1]: d})
|
||||
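# Worked example (added for illustration, not part of the original file):
#   gendict(['a', 'b', 'c'], 1)  ->  {'a': {'b': {'c': 1}}}
#   gendict([], 1)               ->  1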
|
||||
class MultiInstanceTask(TaskSpec):
|
||||
"""
|
||||
When executed, this task performs a split on the current task.
|
||||
The number of outgoing tasks depends on the runtime value of a
|
||||
specified data field.
|
||||
If more than one input is connected, the task performs an implicit
|
||||
multi merge.
|
||||
|
||||
This task has one or more inputs and may have any number of outputs.
|
||||
"""
|
||||
|
||||
def __init__(self, wf_spec, name, times, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:type wf_spec: WorkflowSpec
|
||||
:param wf_spec: A reference to the workflow specification.
|
||||
:type name: str
|
||||
:param name: The name of the task spec.
|
||||
:type times: int or :class:`SpiffWorkflow.operators.Term`
|
||||
:param times: The number of tasks to create.
|
||||
:type kwargs: dict
|
||||
:param kwargs: See :class:`SpiffWorkflow.specs.TaskSpec`.
|
||||
"""
|
||||
if times is None:
|
||||
raise ValueError('times argument is required')
|
||||
self.times = times
|
||||
|
||||
# We don't really pass these things in (we should), but putting them here to document that they exist
|
||||
self.loopTask = kwargs.get('loopTask', False)
|
||||
self.isSequential = kwargs.get('isSequential', False)
|
||||
self.expanded = kwargs.get('expanded', 1)
|
||||
self.elementVar = kwargs.get('element_var')
|
||||
self.collection = kwargs.get('collection')
|
||||
|
||||
self.multiInstance = True
|
||||
|
||||
TaskSpec.__init__(self, wf_spec, name, **kwargs)
|
||||
|
||||
|
||||
# DO NOT OVERRIDE THE SPEC TYPE.
|
||||
# @property
|
||||
# def spec_type(self):
|
||||
# return 'MultiInstance Task'
|
||||
|
||||
def _find_my_task(self, task):
|
||||
for thetask in task.workflow.task_tree:
|
||||
if thetask.thread_id != task.thread_id:
|
||||
continue
|
||||
if thetask.task_spec == self:
|
||||
return thetask
|
||||
return None
|
||||
|
||||
def _on_trigger(self, task_spec):
|
||||
"""
|
||||
May be called after execute() was already completed to create an
|
||||
additional outbound task.
|
||||
"""
|
||||
|
||||
        # Find a Task for this TaskSpec.
|
||||
|
||||
my_task = self._find_my_task(task_spec)
|
||||
if my_task._has_state(TaskState.COMPLETED):
|
||||
state = TaskState.READY
|
||||
else:
|
||||
state = TaskState.FUTURE
|
||||
for output in self.outputs:
|
||||
new_task = my_task._add_child(output, state)
|
||||
new_task.triggered = True
|
||||
output._predict(new_task)
|
||||
|
||||
def _check_inputs(self, my_task):
|
||||
if self.collection is None:
|
||||
return
|
||||
# look for variable in context, if we don't find it, default to 1
|
||||
variable = valueof(my_task, self.times, 1)
|
||||
if self.times.name == self.collection.name and type(variable) == type([]):
|
||||
raise WorkflowTaskExecException(my_task,
|
||||
'If we are updating a collection,'
|
||||
' then the collection must be a '
|
||||
'dictionary.')
|
||||
|
||||
def _get_loop_completion(self,my_task):
|
||||
if not self.completioncondition == None:
|
||||
terminate = my_task.workflow.script_engine.evaluate(my_task,self.completioncondition)
|
||||
if terminate:
|
||||
my_task.terminate_current_loop = True
|
||||
return terminate
|
||||
return False
|
||||
|
||||
def _get_count(self, my_task):
|
||||
"""
|
||||
        self.times holds the text entered in the BPMN model.
|
||||
        It could be a plain number - in that case, return that number.
|
||||
        It could be a variable name - in that case, look the value up in my_task:
|
||||
        the value could be a number (possibly as text) - return its integer value;
|
||||
        it could be a list of records - return the length of the list;
|
||||
        it could be a dict - return the number of keys.
|
||||
"""
|
||||
|
||||
if is_number(self.times.name):
|
||||
return int(self.times.name)
|
||||
variable = valueof(my_task, self.times, 1) # look for variable in context, if we don't find it, default to 1
|
||||
|
||||
if is_number(variable):
|
||||
return int(variable)
|
||||
if isinstance(variable,list):
|
||||
return len(variable)
|
||||
if isinstance(variable,dict):
|
||||
return len(variable.keys())
|
||||
return 1 # we shouldn't ever get here, but just in case return a sane value.
|
||||
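    # Illustrative mappings for _get_count (added for clarity; values assumed):
    #   times text is "3"                          -> 3
    #   times names a variable holding [7, 8]      -> 2   (length of the list)
    #   times names a variable holding {'a': 1}    -> 1   (number of keys)
    #   times names a variable that is not set     -> 1   (default)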
|
||||
def _get_current_var(self, my_task, pos):
|
||||
variable = valueof(my_task, self.times, 1)
|
||||
if is_number(variable):
|
||||
return pos
|
||||
if isinstance(variable,list) and len(variable) >= pos:
|
||||
return variable[pos - 1]
|
||||
elif isinstance(variable,dict) and len(list(variable.keys())) >= pos:
|
||||
return variable[list(variable.keys())[pos - 1]]
|
||||
else:
|
||||
return pos
|
||||
|
||||
def _get_predicted_outputs(self, my_task):
|
||||
split_n = self._get_count(my_task)
|
||||
|
||||
# Predict the outputs.
|
||||
outputs = []
|
||||
for i in range(split_n):
|
||||
outputs += self.outputs
|
||||
|
||||
return outputs
|
||||
|
||||
def _build_gateway_name(self,position):
|
||||
"""
|
||||
        Build a unique name for each task - it needs to be stable
|
||||
        across save/restore of the workflow spec.
|
||||
"""
|
||||
return 'Gateway_for_' + str(self.name) + "_" + position
|
||||
|
||||
def _make_new_gateway(self,my_task,suffix,descr):
|
||||
gw_spec = ParallelGateway(self._wf_spec,
|
||||
self._build_gateway_name(suffix),
|
||||
triggered=False,
|
||||
description=descr)
|
||||
gw = Task(my_task.workflow, task_spec=gw_spec)
|
||||
return gw_spec,gw
|
||||
|
||||
def _add_gateway(self, my_task):
|
||||
""" Generate parallel gateway tasks on either side of the current task.
|
||||
This emulates a standard BPMN pattern of having parallel tasks between
|
||||
two parallel gateways.
|
||||
Once we have set up the gateways, we write a note into our internal data so that
|
||||
we don't do it again.
|
||||
"""
|
||||
# Expand this
|
||||
# A-> ME -> C
|
||||
# into this
|
||||
# A -> GW_start -> ME -> GW_end -> C
|
||||
# where GW is a parallel gateway
|
||||
|
||||
|
||||
# check to see if we have already done this, this code gets called multiple times
|
||||
# as we build the tree
|
||||
if my_task.parent.task_spec.name[:11] == 'Gateway_for':
|
||||
return
|
||||
|
||||
# build the gateway specs and the tasks.
|
||||
# Spiff wants a distinct spec for each task
|
||||
# that it has in the workflow or it will throw an error
|
||||
start_gw_spec, start_gw = self._make_new_gateway(my_task,'start','Begin Gateway')
|
||||
end_gw_spec, end_gw = self._make_new_gateway(my_task,'end','End Gateway')
|
||||
|
||||
# Set up the parent task and insert it into the workflow
|
||||
|
||||
# remove the current task spec from the parent, it will be replaced with the new construct.
|
||||
my_task.parent.task_spec.outputs = [x for x in my_task.parent.task_spec.outputs if x != my_task.task_spec]
|
||||
|
||||
# in the case that our parent is a gateway with a default route,
|
||||
# we need to ensure that the default route is empty
|
||||
# so that connect can set it up properly
|
||||
if hasattr(my_task.parent.task_spec,'default_task_spec') and \
|
||||
my_task.parent.task_spec.default_task_spec == my_task.task_spec.name:
|
||||
my_task.parent.task_spec.default_task_spec = None
|
||||
my_task.parent.task_spec.connect(start_gw_spec)
|
||||
elif isinstance(my_task.parent.task_spec, ExclusiveGateway):
|
||||
for cond, name in [ (cond, name) for cond, name in my_task.parent.task_spec.cond_task_specs\
|
||||
if name == my_task.task_spec.name]:
|
||||
my_task.parent.task_spec.cond_task_specs.remove((cond, name))
|
||||
my_task.parent.task_spec.cond_task_specs.append((cond, start_gw_spec.name))
|
||||
start_gw_spec.inputs.append(my_task.parent.task_spec)
|
||||
else:
|
||||
my_task.parent.task_spec.outputs.append(start_gw_spec)
|
||||
start_gw_spec.inputs.append(my_task.parent.task_spec)
|
||||
|
||||
# get a list of all siblings and replace myself with the new gateway task
|
||||
# in the parent task
|
||||
newchildren = []
|
||||
for child in my_task.parent.children:
|
||||
if child == my_task:
|
||||
newchildren.append(start_gw)
|
||||
else:
|
||||
newchildren.append(child)
|
||||
my_task.parent.children = newchildren
|
||||
|
||||
        # update the gateway's parent to be my parent
|
||||
start_gw.parent = my_task.parent
|
||||
# update my parent to be the gateway
|
||||
my_task.parent = start_gw
|
||||
start_gw_spec.connect(self)
|
||||
start_gw.children = [my_task]
|
||||
|
||||
# transfer my outputs to the ending gateway and set up the
|
||||
# child parent links
|
||||
end_gw_spec.outputs = self.outputs.copy()
|
||||
self.connect(end_gw_spec)
|
||||
self.outputs = [end_gw_spec]
|
||||
end_gw.parent = my_task
|
||||
my_task.children = [end_gw]
|
||||
|
||||
def multiinstance_info(self, my_task):
|
||||
split_n = self._get_count(my_task)
|
||||
|
||||
runtimes = int(my_task._get_internal_data('runtimes', 1)) # set a default if not already run
|
||||
loop = False
|
||||
parallel = False
|
||||
sequential = False
|
||||
|
||||
if my_task.task_spec.loopTask:
|
||||
loop = True
|
||||
elif my_task.task_spec.isSequential:
|
||||
sequential = True
|
||||
else:
|
||||
parallel = True
|
||||
|
||||
return {'is_looping': loop,
|
||||
'is_sequential_mi': sequential,
|
||||
'is_parallel_mi': parallel,
|
||||
'mi_count': split_n,
|
||||
'mi_index': runtimes}
|
||||
|
||||
|
||||
def _make_new_child_task(self,my_task,x):
|
||||
        # Here we generate a distinct copy of our original task for each
|
||||
        # parallel instance, and hook them up into the task tree.
|
||||
new_child = copy.copy(my_task)
|
||||
new_child.id = uuid4()
|
||||
        # We will likely need to update both the internal data and the copy
|
||||
        # of the public data to keep the variables correct.
|
||||
new_child.internal_data = copy.deepcopy(my_task.internal_data)
|
||||
|
||||
new_child.internal_data[
|
||||
'runtimes'] = x + 2 # working with base 1 and we already have one done
|
||||
|
||||
new_child.data = copy.deepcopy(my_task.data)
|
||||
new_child.data[self.elementVar] = self._get_current_var(my_task,
|
||||
x + 2)
|
||||
|
||||
new_child.children = [] # these will be updated later
|
||||
# in the case of parallel, the children list will get updated during the predict loop
|
||||
return new_child
|
||||
|
||||
def _expand_sequential(self,my_task,split_n):
|
||||
# this should be only for SMI and not looping tasks -
|
||||
# we need to patch up the children and make sure they chain correctly
|
||||
# this is different from PMI because the children all link together, not to
|
||||
# the gateways on both ends.
|
||||
# first let's check for a task in the task spec tree
|
||||
|
||||
# we have to jump through some hoops to determine if we have already
|
||||
# expanded this properly as we may have a cardinality that may change
|
||||
# and this code gets run a bunch of times.
|
||||
expanded = getattr(self, 'expanded', 1)
|
||||
if split_n >= expanded:
|
||||
setattr(self, 'expanded', split_n)
|
||||
|
||||
if not (expanded == split_n):
|
||||
|
||||
# Initialize based on current task
|
||||
my_task_copy = copy.copy(my_task)
|
||||
current_task = my_task
|
||||
current_task_spec = self
|
||||
proto_task_spec = copy.copy(self)
|
||||
|
||||
# Essentially we are expanding like this:
|
||||
# A -> B0 -> C
|
||||
# A -> B0 -> B1 -> B2 -> C
|
||||
# each new child has the last child we created as its parent
|
||||
# and the outputs of what B0 had previously.
|
||||
# this has to be done for both the task and the task spec.
|
||||
|
||||
for x in range(split_n - expanded):
|
||||
# create Bx from Bx-1
|
||||
new_child = self._make_new_child_task(my_task,x)
|
||||
# set children of Bx = children of B0
|
||||
new_child.children = copy.copy(my_task_copy.children)
|
||||
# all of C's parents should be Bx
|
||||
for child in new_child.children:
|
||||
child.parent = new_child
|
||||
# create a new task spec for this new task and update it
|
||||
new_task_spec = self._make_new_task_spec(proto_task_spec, my_task, x)
|
||||
new_child.task_spec = new_task_spec
|
||||
new_child._set_state(TaskState.MAYBE)
|
||||
|
||||
# update task spec inputs and outputs like we did for the task
|
||||
current_task_spec.outputs = [new_task_spec]
|
||||
new_task_spec.inputs = [current_task_spec]
|
||||
current_task.children = [new_child]
|
||||
# update the parent of the new task
|
||||
new_child.parent = current_task
|
||||
# set up variables for next pass.
|
||||
current_task = new_child
|
||||
current_task_spec = new_task_spec
|
||||
|
||||
def _expand_parallel(self,my_task,split_n):
|
||||
# add a parallel gateway on either side of this task
|
||||
self._add_gateway(my_task)
|
||||
# we use the child count of the parallel gateway to determine
|
||||
# if we have expanded this or not. Children of the gateway we just created
|
||||
# should match the split level provided by the multiinstance
|
||||
|
||||
for x in range(split_n - len(my_task.parent.children)):
|
||||
new_child = self._make_new_child_task(my_task,x)
|
||||
new_task_spec = self._make_new_task_spec(my_task.task_spec, my_task, x)
|
||||
new_child.task_spec = new_task_spec
|
||||
# patch up the right hand side gateway
|
||||
self.outputs[0].inputs.append(new_task_spec)
|
||||
# patch up the left hand side gateway task and task_spec
|
||||
my_task.parent.children.append(new_child)
|
||||
my_task.parent.task_spec.outputs.append(new_task_spec)
|
||||
|
||||
def _make_new_task_spec(self,proto_task_spec,my_task,suffix):
|
||||
|
||||
new_task_spec = copy.copy(proto_task_spec)
|
||||
new_task_spec.name = new_task_spec.name + "_%d" % suffix
|
||||
new_task_spec.id = str(new_task_spec.id) + "_%d" % suffix
|
||||
my_task.workflow.spec.task_specs[new_task_spec.name] = new_task_spec # add to registry
|
||||
return new_task_spec
|
||||
|
||||
def _predict_hook(self, my_task):
|
||||
|
||||
split_n = self._get_count(my_task)
|
||||
runtimes = int(my_task._get_internal_data('runtimes', 1)) # set a default if not already run
|
||||
|
||||
my_task._set_internal_data(splits=split_n, runtimes=runtimes)
|
||||
if not self.elementVar:
|
||||
self.elementVar = my_task.task_spec.name + "_CurrentVar"
|
||||
|
||||
my_task.data[self.elementVar] = copy.copy(self._get_current_var(my_task, runtimes))
|
||||
|
||||
# Create the outgoing tasks.
|
||||
outputs = []
|
||||
# In the special case that this is a Parallel multiInstance, we need
|
||||
# to expand the children in the middle. This method gets called
|
||||
# during every pass through the tree, so we need to wait until our
|
||||
# real cardinality gets updated to expand the tree.
|
||||
if (not self.isSequential):
|
||||
self._expand_parallel(my_task,split_n)
|
||||
|
||||
elif not self.loopTask:
|
||||
self._expand_sequential(my_task,split_n)
|
||||
|
||||
outputs += self.outputs
|
||||
if my_task._is_definite():
|
||||
my_task._sync_children(outputs, TaskState.FUTURE)
|
||||
else:
|
||||
my_task._sync_children(outputs, TaskState.LIKELY)
|
||||
|
||||
def _handle_special_cases(self, my_task):
|
||||
classes = [BusinessRuleTask, ScriptTask, SubWorkflowTask, SubWorkflow, CallActivity]
|
||||
classes = {x.__module__ + "." + x.__name__: x for x in classes}
|
||||
terminate = self._get_loop_completion(my_task)
|
||||
if my_task.task_spec.prevtaskclass in classes.keys() and not terminate:
|
||||
super()._on_complete_hook(my_task)
|
||||
|
||||
def _merge_element_variable(self,my_task,collect,runtimes,colvarname):
|
||||
# if we are updating the same collection as was our loopcardinality
|
||||
# then all the keys should be there and we can use the sorted keylist
|
||||
# if not, we use an integer - we should be guaranteed that the
|
||||
# collection is a dictionary
|
||||
if self.collection is not None and self.times.name == self.collection.name:
|
||||
keys = list(collect.keys())
|
||||
if len(keys) < runtimes:
|
||||
msg = f"There is a mismatch between runtimes and the number " \
|
||||
f"items in the collection, please check for empty " \
|
||||
f"collection {self.collection.name}."
|
||||
raise WorkflowTaskExecException(my_task, msg)
|
||||
|
||||
runtimesvar = keys[runtimes - 1]
|
||||
else:
|
||||
runtimesvar = runtimes
|
||||
|
||||
if self.elementVar in my_task.data and isinstance(my_task.data[self.elementVar], dict):
|
||||
collect[str(runtimesvar)] = DeepMerge.merge(collect.get(runtimesvar, {}),
|
||||
copy.copy(my_task.data[self.elementVar]))
|
||||
|
||||
my_task.data = DeepMerge.merge(my_task.data,
|
||||
gendict(colvarname.split('/'), collect))
|
||||
|
||||
|
||||
def _update_sibling_data(self,my_task,runtimes,runcount,colvarname,collect):
|
||||
if (runtimes < runcount) and not my_task.terminate_current_loop and self.loopTask:
|
||||
my_task._set_state(TaskState.READY)
|
||||
my_task._set_internal_data(runtimes=runtimes + 1)
|
||||
my_task.data[self.elementVar] = self._get_current_var(my_task, runtimes + 1)
|
||||
element_var_data = None
|
||||
else:
|
||||
            # The element var data should not be passed on to children,
|
||||
            # but we will add it back onto this task later.
|
||||
element_var_data = my_task.data.pop(self.elementVar, None)
|
||||
|
||||
# if this is a parallel mi - then update all siblings with the
|
||||
# current data
|
||||
if not self.isSequential:
|
||||
for task in my_task.parent.children:
|
||||
task.data = DeepMerge.merge(
|
||||
task.data,
|
||||
gendict(colvarname.split('/'),
|
||||
collect)
|
||||
)
|
||||
return element_var_data
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
# do special stuff for non-user tasks
|
||||
self._handle_special_cases(my_task)
|
||||
self.__iteration_complete(my_task)
|
||||
|
||||
def __iteration_complete(self, my_task):
|
||||
|
||||
# this is all about updating the collection for a MI
|
||||
self._check_inputs(my_task)
|
||||
|
||||
# initialize
|
||||
runcount = self._get_count(my_task)
|
||||
runtimes = int(my_task._get_internal_data('runtimes', 1))
|
||||
|
||||
if self.collection is not None:
|
||||
colvarname = self.collection.name
|
||||
else:
|
||||
colvarname = my_task.task_spec.name
|
||||
|
||||
collect = valueof(my_task, self.collection, {})
|
||||
|
||||
self._merge_element_variable(my_task,collect,runtimes,colvarname)
|
||||
|
||||
element_var_data = self._update_sibling_data(my_task,runtimes,runcount,colvarname,collect)
|
||||
|
||||
# please see MultiInstance code for previous version
|
||||
outputs = []
|
||||
outputs += self.outputs
|
||||
|
||||
if not isinstance(my_task.task_spec,SubWorkflowTask):
|
||||
my_task._sync_children(outputs, TaskState.FUTURE)
|
||||
|
||||
for child in my_task.children:
|
||||
child.task_spec._update(child)
|
||||
|
||||
# If removed, add the element_var_data back onto this task, after
|
||||
# updating the children.
|
||||
if(element_var_data):
|
||||
my_task.data[self.elementVar] = element_var_data
|
||||
|
||||
def serialize(self, serializer):
|
||||
|
||||
return serializer.serialize_multi_instance(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
prevclass = get_class(s_state['prevtaskclass'])
|
||||
spec = getDynamicMIClass(s_state['name'], prevclass)(wf_spec,s_state['name'],s_state['times'])
|
||||
spec.prevtaskclass = s_state['prevtaskclass']
|
||||
|
||||
return serializer.deserialize_multi_instance(wf_spec, s_state, spec)
|
||||
|
||||
|
||||
def getDynamicMIClass(id,prevclass):
|
||||
id = re.sub('(.+)_[0-9]$','\\1',id)
|
||||
return type(id + '_class', (
|
||||
MultiInstanceTask, prevclass), {})
|
|
@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
from ...specs import Simple
|
||||
|
||||
from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
|
||||
|
||||
|
||||
class NoneTask(Simple, BpmnSpecMixin):
|
||||
|
||||
def is_engine_task(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Task'
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic(wf_spec, s_state, NoneTask)
|
|
@ -0,0 +1,56 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
from .UnstructuredJoin import UnstructuredJoin
|
||||
|
||||
|
||||
class ParallelGateway(UnstructuredJoin):
|
||||
"""
|
||||
Task Spec for a bpmn:parallelGateway node. From the specification of BPMN
|
||||
(http://www.omg.org/spec/BPMN/2.0/PDF - document number:formal/2011-01-03):
|
||||
|
||||
The Parallel Gateway is activated if there is at least one token on
|
||||
each incoming Sequence Flow.
|
||||
|
||||
    The Parallel Gateway consumes exactly one token from each incoming
|
||||
    Sequence Flow and produces exactly one token at each outgoing
|
||||
Sequence Flow.
|
||||
|
||||
TODO: Not implemented:
|
||||
If there are excess tokens at an incoming Sequence Flow, these tokens
|
||||
remain at this Sequence Flow after execution of the Gateway.
|
||||
|
||||
Essentially, this means that we must wait until we have a completed parent
|
||||
task on each incoming sequence.
|
||||
"""
|
||||
|
||||
def _check_threshold_unstructured(self, my_task, force=False):
|
||||
completed_inputs, waiting_tasks = self._get_inputs_with_tokens(my_task)
|
||||
|
||||
# If the threshold was reached, get ready to fire.
|
||||
return (force or len(completed_inputs) >= len(self.inputs),
|
||||
waiting_tasks)
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Parallel Gateway'
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic(wf_spec, s_state, ParallelGateway)
|
|
@ -0,0 +1,65 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .BpmnSpecMixin import BpmnSpecMixin
|
||||
from ...task import TaskState
|
||||
from ...specs.Simple import Simple
|
||||
|
||||
|
||||
class ScriptEngineTask(Simple, BpmnSpecMixin):
|
||||
"""Task Spec for a bpmn:scriptTask node"""
|
||||
|
||||
def _execute(self, task):
|
||||
"""Please override for specific Implementations, see ScriptTask below for an example"""
|
||||
pass
|
||||
|
||||
def _on_complete_hook(self, task):
|
||||
try:
|
||||
self._execute(task)
|
||||
super(ScriptEngineTask, self)._on_complete_hook(task)
|
||||
except Exception as exc:
|
||||
task._set_state(TaskState.WAITING)
|
||||
raise exc
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_script_task(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_script_task(wf_spec, s_state)
|
||||
|
||||
|
||||
class ScriptTask(ScriptEngineTask):
|
||||
|
||||
def __init__(self, wf_spec, name, script, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param script: the script that must be executed by the script engine.
|
||||
"""
|
||||
super(ScriptTask, self).__init__(wf_spec, name, **kwargs)
|
||||
self.script = script
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Script Task'
|
||||
|
||||
def _execute(self, task):
|
||||
task.workflow.script_engine.execute(task, self.script)
|
||||
|
|
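A minimal sketch of another ScriptEngineTask subclass, following the same pattern as ScriptTask above (illustrative only, not part of the original file):

import datetime

class TimestampTask(ScriptEngineTask):
    """Writes the current UTC time into the task data when the task runs."""

    @property
    def spec_type(self):
        return 'Timestamp Task'

    def _execute(self, task):
        task.data['timestamp'] = datetime.datetime.utcnow().isoformat()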
@ -0,0 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from .ScriptTask import ScriptEngineTask
|
||||
|
||||
|
||||
class ServiceTask(ScriptEngineTask):
|
||||
|
||||
"""
|
||||
Task Spec for a bpmn:serviceTask node.
|
||||
"""
|
||||
|
||||
def __init__(self, wf_spec, name, **kwargs):
|
||||
super(ServiceTask, self).__init__(wf_spec, name, **kwargs)
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Service Task'
|
|
@ -0,0 +1,130 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from copy import deepcopy
|
||||
|
||||
from SpiffWorkflow.task import TaskState
|
||||
from .BpmnSpecMixin import BpmnSpecMixin
|
||||
from ...specs.SubWorkflow import SubWorkflow
|
||||
from ...specs import TaskSpec
|
||||
|
||||
|
||||
class SubWorkflowTask(SubWorkflow, BpmnSpecMixin):
|
||||
"""
|
||||
Task Spec for a bpmn node containing a subworkflow.
|
||||
"""
|
||||
def __init__(self, wf_spec, name, subworkflow_spec, transaction=False, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
        :param subworkflow_spec: the BpmnProcessSpec for the subprocess.
|
||||
        :param transaction: True if this subprocess is a transaction.
|
||||
"""
|
||||
super(SubWorkflowTask, self).__init__(wf_spec, name, None, **kwargs)
|
||||
self.spec = subworkflow_spec
|
||||
self.transaction = transaction
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Subprocess'
|
||||
|
||||
def test(self):
|
||||
TaskSpec.test(self)
|
||||
|
||||
def _on_ready_before_hook(self, my_task):
|
||||
subworkflow = my_task.workflow.create_subprocess(my_task, self.spec, self.name)
|
||||
subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)
|
||||
subworkflow.data = deepcopy(my_task.workflow.data)
|
||||
|
||||
def _on_ready_hook(self, my_task):
|
||||
|
||||
for obj in self.data_input_associations:
|
||||
obj.get(my_task)
|
||||
|
||||
subworkflow = my_task.workflow.get_subprocess(my_task)
|
||||
start = subworkflow.get_tasks_from_spec_name('Start', workflow=subworkflow)
|
||||
|
||||
if len(subworkflow.spec.data_inputs) == 0:
|
||||
# Copy all task data into start task if no inputs specified
|
||||
start[0].set_data(**my_task.data)
|
||||
else:
|
||||
# Otherwise copy only task data with the specified names
|
||||
for var in subworkflow.spec.data_inputs:
|
||||
var.copy(my_task, start[0], data_input=True)
|
||||
|
||||
self._predict(my_task)
|
||||
for child in subworkflow.task_tree.children:
|
||||
child.task_spec._update(child)
|
||||
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
|
||||
def _on_subworkflow_completed(self, subworkflow, my_task):
|
||||
|
||||
# Shouldn't this always be true?
|
||||
if isinstance(my_task.parent.task_spec, BpmnSpecMixin):
|
||||
my_task.parent.task_spec._child_complete_hook(my_task)
|
||||
|
||||
if len(subworkflow.spec.data_outputs) == 0:
|
||||
# Copy all workflow data if no outputs are specified
|
||||
my_task.data = deepcopy(subworkflow.data)
|
||||
else:
|
||||
end = subworkflow.get_tasks_from_spec_name('End', workflow=subworkflow)
|
||||
# Otherwise only copy data with the specified names
|
||||
for var in subworkflow.spec.data_outputs:
|
||||
var.copy(end[0], my_task, data_output=True)
|
||||
|
||||
my_task._set_state(TaskState.READY)
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
wf = my_task.workflow._get_outermost_workflow(my_task)
|
||||
if my_task.id not in wf.subprocesses:
|
||||
super()._update_hook(my_task)
|
||||
|
||||
def _predict_hook(self, my_task):
|
||||
# The base Subworkflow task predict doesn't work with the loop reset task
|
||||
BpmnSpecMixin._predict_hook(self, my_task)
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
BpmnSpecMixin._on_complete_hook(self, my_task)
|
||||
for child in my_task.children:
|
||||
child.task_spec._update(child)
|
||||
|
||||
def _on_cancel(self, my_task):
|
||||
subworkflow = my_task.workflow.get_subprocess(my_task)
|
||||
if subworkflow is not None:
|
||||
subworkflow.cancel()
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_subworkflow_task(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_subworkflow_task(wf_spec, s_state, SubWorkflowTask)
|
||||
|
||||
def task_will_set_children_future(self, my_task):
|
||||
my_task.workflow.delete_subprocess(my_task)
|
||||
|
||||
|
||||
class CallActivity(SubWorkflowTask):
|
||||
|
||||
def __init__(self, wf_spec, name, subworkflow_spec, **kwargs):
|
||||
super(CallActivity, self).__init__(wf_spec, name, subworkflow_spec, False, **kwargs)
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Call Activity'
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_subworkflow_task(wf_spec, s_state, CallActivity)
|
||||
|
||||
class TransactionSubprocess(SubWorkflowTask):
|
||||
|
||||
def __init__(self, wf_spec, name, subworkflow_spec, **kwargs):
|
||||
super(TransactionSubprocess, self).__init__(wf_spec, name, subworkflow_spec, True, **kwargs)
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Transactional Subprocess'
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_subworkflow_task(wf_spec, s_state, TransactionSubprocess)
|
|
@ -0,0 +1,169 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from ...exceptions import WorkflowException
|
||||
|
||||
from ...task import TaskState
|
||||
from .BpmnSpecMixin import BpmnSpecMixin
|
||||
from ...specs.Join import Join
|
||||
|
||||
|
||||
|
||||
class UnstructuredJoin(Join, BpmnSpecMixin):
|
||||
"""
|
||||
A helper subclass of Join that makes it work in a slightly friendlier way
|
||||
for the BPMN style threading
|
||||
"""
|
||||
|
||||
def _check_threshold_unstructured(self, my_task, force=False):
|
||||
raise NotImplementedError("Please implement this in the subclass")
|
||||
|
||||
def _get_inputs_with_tokens(self, my_task):
|
||||
# Look at the tree to find all places where this task is used.
|
||||
tasks = []
|
||||
for task in my_task.workflow.task_tree:
|
||||
if task.thread_id != my_task.thread_id:
|
||||
continue
|
||||
if task.workflow != my_task.workflow:
|
||||
continue
|
||||
if task.task_spec != self:
|
||||
continue
|
||||
if task._is_finished():
|
||||
continue
|
||||
tasks.append(task)
|
||||
|
||||
        # Look up which tasks have completed parents.
|
||||
waiting_tasks = []
|
||||
completed_inputs = set()
|
||||
for task in tasks:
|
||||
if task.parent._has_state(TaskState.COMPLETED) and (
|
||||
task._has_state(TaskState.WAITING) or task == my_task):
|
||||
if task.parent.task_spec in completed_inputs:
|
||||
                    raise WorkflowException(
|
||||
                        task.task_spec,
|
||||
                        "Unsupported looping behaviour: two threads waiting"
|
||||
                        " on the same sequence flow.")
|
||||
completed_inputs.add(task.parent.task_spec)
|
||||
else:
|
||||
waiting_tasks.append(task.parent)
|
||||
|
||||
return completed_inputs, waiting_tasks
|
||||
|
||||
def _do_join(self, my_task):
|
||||
# Copied from Join parent class
|
||||
# This has some minor changes
|
||||
|
||||
# One Join spec may have multiple corresponding Task objects::
|
||||
#
|
||||
# - Due to the MultiInstance pattern.
|
||||
# - Due to the ThreadSplit pattern.
|
||||
#
|
||||
# When using the MultiInstance pattern, we want to join across
|
||||
# the resulting task instances. When using the ThreadSplit
|
||||
# pattern, we only join within the same thread. (Both patterns
|
||||
# may also be mixed.)
|
||||
#
|
||||
# We are looking for all task instances that must be joined.
|
||||
# We limit our search by starting at the split point.
|
||||
if self.split_task:
|
||||
split_task = my_task.workflow.get_task_spec_from_name(
|
||||
self.split_task)
|
||||
split_task = my_task._find_ancestor(split_task)
|
||||
else:
|
||||
split_task = my_task.workflow.task_tree
|
||||
|
||||
# Identify all corresponding task instances within the thread.
|
||||
# Also remember which of those instances was most recently changed,
|
||||
# because we are making this one the instance that will
|
||||
# continue the thread of control. In other words, we will continue
|
||||
# to build the task tree underneath the most recently changed task.
|
||||
last_changed = None
|
||||
thread_tasks = []
|
||||
for task in split_task._find_any(self):
|
||||
# Ignore tasks from other threads.
|
||||
if task.thread_id != my_task.thread_id:
|
||||
continue
|
||||
# Ignore tasks from other subprocesses:
|
||||
if task.workflow != my_task.workflow:
|
||||
continue
|
||||
|
||||
# Ignore my outgoing branches.
|
||||
if task._is_descendant_of(my_task):
|
||||
continue
|
||||
# Ignore completed tasks (this is for loop handling)
|
||||
if task._is_finished():
|
||||
continue
|
||||
|
||||
# For an inclusive join, this can happen - it's a future join
|
||||
if not task.parent._is_finished():
|
||||
continue
|
||||
|
||||
# We have found a matching instance.
|
||||
thread_tasks.append(task)
|
||||
|
||||
# Check whether the state of the instance was recently
|
||||
# changed.
|
||||
changed = task.parent.last_state_change
|
||||
if last_changed is None\
|
||||
or changed > last_changed.parent.last_state_change:
|
||||
last_changed = task
|
||||
|
||||
# Update data from all the same thread tasks.
|
||||
thread_tasks.sort(key=lambda t: t.parent.last_state_change)
|
||||
for task in thread_tasks:
|
||||
self.data.update(task.data)
|
||||
|
||||
# Mark the identified task instances as COMPLETED. The exception
|
||||
# is the most recently changed task, for which we assume READY.
|
||||
# By setting the state to READY only, we allow for calling
|
||||
# :class:`Task.complete()`, which leads to the task tree being
|
||||
# (re)built underneath the node.
|
||||
for task in thread_tasks:
|
||||
if task == last_changed:
|
||||
task.data.update(self.data)
|
||||
self.entered_event.emit(my_task.workflow, my_task)
|
||||
task._ready()
|
||||
else:
|
||||
task._set_state(TaskState.COMPLETED)
|
||||
task._drop_children()
|
||||
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
if my_task._is_predicted():
|
||||
self._predict(my_task)
|
||||
if not my_task.parent._is_finished():
|
||||
return
|
||||
|
||||
target_state = getattr(my_task, '_bpmn_load_target_state', None)
|
||||
if target_state == TaskState.WAITING:
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
return
|
||||
|
||||
super(UnstructuredJoin, self)._update_hook(my_task)
|
||||
|
||||
def task_should_set_children_future(self, my_task):
|
||||
return True
|
||||
|
||||
def task_will_set_children_future(self, my_task):
|
||||
# go find all of the gateways with the same name as this one,
|
||||
# drop children and set state to WAITING
|
||||
for t in list(my_task.workflow.task_tree):
|
||||
if t.task_spec.name == self.name and t.state == TaskState.COMPLETED:
|
||||
t._set_state(TaskState.WAITING)
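
The rule implemented in _do_join above can be summed up briefly: every joinable instance in the
thread is completed except the most recently changed one, which stays READY so the task tree keeps
growing underneath it. A minimal illustrative sketch, using a hypothetical helper and task-like
objects rather than anything from this library:

def pick_continuation(thread_tasks):
    # Hypothetical helper, not part of SpiffWorkflow: the instance whose parent
    # changed state last carries the thread of control after the join.
    return max(thread_tasks, key=lambda t: t.parent.last_state_change)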
|
|
@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .BpmnSpecMixin import BpmnSpecMixin
|
||||
from ...specs.Simple import Simple
|
||||
|
||||
|
||||
class UserTask(Simple, BpmnSpecMixin):
|
||||
|
||||
"""
|
||||
Task Spec for a bpmn:userTask node.
|
||||
"""
|
||||
|
||||
def is_engine_task(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'User Task'
|
|
@ -0,0 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
|
@ -0,0 +1,71 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .event_types import ThrowingEvent
|
||||
from .event_definitions import TerminateEventDefinition, CancelEventDefinition
|
||||
from ....task import TaskState
|
||||
|
||||
|
||||
class EndEvent(ThrowingEvent):
|
||||
"""
|
||||
Task Spec for a bpmn:endEvent node.
|
||||
|
||||
From the specification of BPMN (http://www.omg.org/spec/BPMN/2.0/PDF -
|
||||
document number:formal/2011-01-03): For a "terminate" End Event, the
|
||||
Process is abnormally terminated - no other ongoing Process instances are
|
||||
affected.
|
||||
|
||||
For all other End Events, the behavior associated with the Event type is
|
||||
performed, e.g., the associated Message is sent for a Message End Event,
|
||||
the associated signal is sent for a Signal End Event, and so on. The
|
||||
Process instance is then completed, if and only if the following two
|
||||
conditions hold:
|
||||
* All start nodes of the Process have been visited. More precisely, all
|
||||
Start Events have been triggered, and for all starting Event-Based
|
||||
Gateways, one of the associated Events has been triggered.
|
||||
* There is no token remaining within the Process instance.
|
||||
"""
|
||||
|
||||
def __init__(self, wf_spec, name, event_definition, **kwargs):
|
||||
super(EndEvent, self).__init__(wf_spec, name, event_definition, **kwargs)
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'End Event'
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
|
||||
super(EndEvent, self)._on_complete_hook(my_task)
|
||||
|
||||
if isinstance(self.event_definition, TerminateEventDefinition):
|
||||
|
||||
# We are finished. Set the workflow data and cancel all tasks
|
||||
my_task.workflow.set_data(**my_task.data)
|
||||
for task in my_task.workflow.get_tasks(TaskState.NOT_FINISHED_MASK, workflow=my_task.workflow):
|
||||
task.cancel()
|
||||
|
||||
elif isinstance(self.event_definition, CancelEventDefinition):
|
||||
my_task.workflow.cancel()
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_generic_event(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic_event(wf_spec, s_state, EndEvent)
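
A minimal construction sketch for the terminate behaviour described in the docstring above;
`wf_spec` stands for a BPMN process spec that is not defined in this file, so treat it as an
assumption:

# Hedged sketch: when this end event completes, _on_complete_hook above cancels
# every task that is not yet finished. `wf_spec` is assumed to exist.
terminate_end = EndEvent(wf_spec, 'TerminateEnd', TerminateEventDefinition())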
|
|
@ -0,0 +1,150 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .event_types import ThrowingEvent, CatchingEvent
|
||||
from .event_definitions import CycleTimerEventDefinition
|
||||
from ..BpmnSpecMixin import BpmnSpecMixin
|
||||
from ....specs.Simple import Simple
|
||||
from ....task import TaskState
|
||||
|
||||
class SendTask(ThrowingEvent):
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Send Task'
|
||||
|
||||
|
||||
class ReceiveTask(CatchingEvent):
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Receive Task'
|
||||
|
||||
|
||||
class IntermediateCatchEvent(CatchingEvent):
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return f'{self.event_definition.event_type} Catching Event'
|
||||
|
||||
|
||||
class IntermediateThrowEvent(ThrowingEvent):
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return f'{self.event_definition.event_type} Throwing Event'
|
||||
|
||||
|
||||
class _BoundaryEventParent(Simple, BpmnSpecMixin):
|
||||
"""This task is inserted before a task with boundary events."""
|
||||
|
||||
# I wonder if this would be better modelled as some type of join.
|
||||
# It would make more sense to have the boundary events and the task
|
||||
# they're attached to be inputs rather than outputs.
|
||||
|
||||
def __init__(self, wf_spec, name, main_child_task_spec, **kwargs):
|
||||
|
||||
super(_BoundaryEventParent, self).__init__(wf_spec, name)
|
||||
self.main_child_task_spec = main_child_task_spec
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Boundary Event Parent'
|
||||
|
||||
def _on_ready_hook(self, my_task):
|
||||
|
||||
# Clear any events that our children might have received and
|
||||
# wait for new events
|
||||
for child in my_task.children:
|
||||
if isinstance(child.task_spec, BoundaryEvent):
|
||||
child.task_spec.event_definition.reset(child)
|
||||
child._set_state(TaskState.WAITING)
|
||||
|
||||
def _child_complete_hook(self, child_task):
|
||||
|
||||
# If the main child completes, or a cancelling event occurs, cancel any
|
||||
# unfinished children
|
||||
if child_task.task_spec == self.main_child_task_spec or child_task.task_spec.cancel_activity:
|
||||
for sibling in child_task.parent.children:
|
||||
if sibling == child_task:
|
||||
continue
|
||||
if sibling.task_spec == self.main_child_task_spec or not sibling._is_finished():
|
||||
sibling.cancel()
|
||||
for t in child_task.workflow._get_waiting_tasks():
|
||||
t.task_spec._update(t)
|
||||
|
||||
# If our event is a cycle timer, we need to set it back to waiting so it can fire again
|
||||
elif isinstance(child_task.task_spec.event_definition, CycleTimerEventDefinition):
|
||||
child_task._set_state(TaskState.WAITING)
|
||||
child_task.task_spec._update_hook(child_task)
|
||||
|
||||
def _predict_hook(self, my_task):
|
||||
|
||||
# Events attached to the main task might occur
|
||||
my_task._sync_children(self.outputs, state=TaskState.MAYBE)
|
||||
# The main child's state is based on this task's state
|
||||
state = TaskState.FUTURE if my_task._is_definite() else my_task.state
|
||||
for child in my_task.children:
|
||||
if child.task_spec == self.main_child_task_spec:
|
||||
child._set_state(state)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_boundary_event_parent(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_boundary_event_parent(wf_spec, s_state, cls)
|
||||
|
||||
class BoundaryEvent(CatchingEvent):
|
||||
"""Task Spec for a bpmn:boundaryEvent node."""
|
||||
|
||||
def __init__(self, wf_spec, name, event_definition, cancel_activity, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param cancel_activity: True if this is a Cancelling boundary event.
|
||||
"""
|
||||
super(BoundaryEvent, self).__init__(wf_spec, name, event_definition, **kwargs)
|
||||
self.cancel_activity = cancel_activity
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
interrupting = 'Interrupting' if self.cancel_activity else 'Non-Interrupting'
|
||||
return f'{interrupting} {self.event_definition.event_type} Event'
|
||||
|
||||
|
||||
def catches(self, my_task, event_definition, correlations=None):
|
||||
# Boundary events should only be caught while waiting
|
||||
return super(BoundaryEvent, self).catches(my_task, event_definition, correlations) and my_task.state == TaskState.WAITING
|
||||
|
||||
def catch(self, my_task, event_definition):
|
||||
super(BoundaryEvent, self).catch(my_task, event_definition)
|
||||
my_task.complete()
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
super(BoundaryEvent, self)._on_complete_hook(my_task)
|
||||
# Notify the boundary event parent as well.
|
||||
my_task.parent.task_spec._child_complete_hook(my_task)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_boundary_event(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_boundary_event(wf_spec, s_state, cls)
|
|
@ -0,0 +1,50 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .event_types import CatchingEvent
|
||||
from ....task import TaskState
|
||||
|
||||
|
||||
class StartEvent(CatchingEvent):
|
||||
"""Task Spec for a bpmn:startEvent node with an optional event definition."""
|
||||
|
||||
def __init__(self, wf_spec, name, event_definition, **kwargs):
|
||||
super(StartEvent, self).__init__(wf_spec, name, event_definition, **kwargs)
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return f'{self.event_definition.event_type} Start Event'
|
||||
|
||||
def catch(self, my_task, event_definition):
|
||||
|
||||
# We might need to revisit a start event after it completes or
|
||||
# if it got cancelled so we'll still catch messages even if we're finished
|
||||
if my_task.state == TaskState.COMPLETED or my_task.state == TaskState.CANCELLED:
|
||||
my_task.set_children_future()
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
|
||||
super(StartEvent, self).catch(my_task, event_definition)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_generic_event(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic_event(wf_spec, s_state, StartEvent)
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
from .StartEvent import StartEvent
|
||||
from .EndEvent import EndEvent
|
||||
from .IntermediateEvent import IntermediateCatchEvent, IntermediateThrowEvent, BoundaryEvent, _BoundaryEventParent, SendTask, ReceiveTask
|
||||
from .event_definitions import (NoneEventDefinition, CancelEventDefinition, ErrorEventDefinition, EscalationEventDefinition, MessageEventDefinition,
|
||||
SignalEventDefinition, TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition)
|
|
@ -0,0 +1,400 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import datetime
|
||||
from copy import deepcopy
|
||||
|
||||
|
||||
class EventDefinition(object):
|
||||
"""
|
||||
This is the base class for Event Definitions. It implements the default throw/catch
|
||||
behavior for events.
|
||||
|
||||
If internal is true, this event should be thrown to the current workflow.
If external is true, this event should be thrown to the outer workflow.

Default throw behavior is to send the event based on the values of the internal
and external flags.
Default catch behavior is to set the event to fired.
|
||||
"""
|
||||
|
||||
# Format to use for specifying dates for time based events
|
||||
TIME_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
|
||||
|
||||
def __init__(self):
|
||||
# Ideally I'd make these parameters, but I don't want them to be parameters
# for any subclasses (as they are based on event type, not user choice) and
# I don't want to write a separate deserializer for every type.
|
||||
self.internal, self.external = True, True
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return f'{self.__class__.__module__}.{self.__class__.__name__}'
|
||||
|
||||
def has_fired(self, my_task):
|
||||
return my_task._get_internal_data('event_fired', False)
|
||||
|
||||
def catch(self, my_task, event_definition=None):
|
||||
my_task._set_internal_data(event_fired=True)
|
||||
|
||||
def throw(self, my_task):
|
||||
self._throw(
|
||||
event=my_task.task_spec.event_definition,
|
||||
workflow=my_task.workflow,
|
||||
outer_workflow=my_task.workflow.outer_workflow
|
||||
)
|
||||
|
||||
def reset(self, my_task):
|
||||
my_task._set_internal_data(event_fired=False)
|
||||
|
||||
def _throw(self, event, workflow, outer_workflow, correlations=None):
|
||||
# This method exists because usually we just want to send the event in our
|
||||
# own task spec, but we can't do that for message events.
|
||||
# We also don't have a more sophisticated method for addressing events to
|
||||
# a particular process, but this at least provides a mechanism for distinguishing
|
||||
# between processes and subprocesses.
|
||||
if self.internal:
|
||||
workflow.catch(event)
|
||||
if self.external:
|
||||
outer_workflow.catch(event, correlations)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__.__name__ == other.__class__.__name__
|
||||
|
||||
def serialize(self):
|
||||
return {
|
||||
'classname': self.__class__.__module__ + '.' + self.__class__.__name__,
|
||||
'internal': self.internal,
|
||||
'external': self.external,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, dct):
|
||||
dct.pop('classname')
|
||||
internal, external = dct.pop('internal'), dct.pop('external')
|
||||
obj = cls(**dct)
|
||||
obj.internal, obj.external = internal, external
|
||||
return obj
|
||||
|
||||
class NamedEventDefinition(EventDefinition):
|
||||
"""
|
||||
Extend the base event class to provide a name for the event. Most throw/catch events
have names that will be used to identify the event.
|
||||
|
||||
:param name: the name of this event
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
super(NamedEventDefinition, self).__init__()
|
||||
self.name = name
|
||||
|
||||
def reset(self, my_task):
|
||||
super(NamedEventDefinition, self).reset(my_task)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__.__name__ == other.__class__.__name__ and self.name == other.name
|
||||
|
||||
def serialize(self):
|
||||
retdict = super(NamedEventDefinition, self).serialize()
|
||||
retdict['name'] = self.name
|
||||
return retdict
|
||||
|
||||
|
||||
class CancelEventDefinition(EventDefinition):
|
||||
"""
|
||||
Cancel events are only handled by the outer workflow, as they can only be used inside
|
||||
of transaction subprocesses.
|
||||
"""
|
||||
def __init__(self):
|
||||
super(CancelEventDefinition, self).__init__()
|
||||
self.internal = False
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Cancel'
|
||||
|
||||
|
||||
class ErrorEventDefinition(NamedEventDefinition):
|
||||
"""
|
||||
Error events can occur only in subprocesses and as subprocess boundary events. They're
|
||||
matched by code rather than name.
|
||||
"""
|
||||
|
||||
def __init__(self, name, error_code=None):
|
||||
super(ErrorEventDefinition, self).__init__(name)
|
||||
self.error_code = error_code
|
||||
self.internal = False
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Error'
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__.__name__ == other.__class__.__name__ and self.error_code in [ None, other.error_code ]
|
||||
|
||||
def serialize(self):
|
||||
retdict = super(ErrorEventDefinition, self).serialize()
|
||||
retdict['error_code'] = self.error_code
|
||||
return retdict
|
||||
|
||||
class EscalationEventDefinition(NamedEventDefinition):
|
||||
"""
|
||||
Escalation events have names, though they don't seem to be used for anything. Instead
|
||||
the spec says that the escalation code should be matched.
|
||||
"""
|
||||
|
||||
def __init__(self, name, escalation_code=None):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param escalation_code: The escalation code this event should
|
||||
react to. If None then all escalations will activate this event.
|
||||
"""
|
||||
super(EscalationEventDefinition, self).__init__(name)
|
||||
self.escalation_code = escalation_code
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Escalation'
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__.__name__ == other.__class__.__name__ and self.escalation_code in [ None, other.escalation_code ]
|
||||
|
||||
def serialize(self):
|
||||
retdict = super(EscalationEventDefinition, self).serialize()
|
||||
retdict['escalation_code'] = self.escalation_code
|
||||
return retdict
|
||||
|
||||
|
||||
class CorrelationProperty:
|
||||
"""Rules for generating a correlation key when a message is sent or received."""
|
||||
|
||||
def __init__(self, name, expression, correlation_keys):
|
||||
self.name = name # This is the property name
|
||||
self.expression = expression # This is how it's generated
|
||||
self.correlation_keys = correlation_keys # These are the keys it's used by
|
||||
|
||||
|
||||
class MessageEventDefinition(NamedEventDefinition):
|
||||
"""The default message event."""
|
||||
|
||||
def __init__(self, name, correlation_properties=None):
|
||||
super().__init__(name)
|
||||
self.correlation_properties = correlation_properties or []
|
||||
self.payload = None
|
||||
self.internal = False
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Message'
|
||||
|
||||
def catch(self, my_task, event_definition = None):
|
||||
self.update_internal_data(my_task, event_definition)
|
||||
super(MessageEventDefinition, self).catch(my_task, event_definition)
|
||||
|
||||
def throw(self, my_task):
|
||||
# We can't update our own payload, because if this task is reached again
# it has to be evaluated again, so we create a new event instead
|
||||
event = MessageEventDefinition(self.name, self.correlation_properties)
|
||||
# Generating a payload unfortunately needs to be handled using custom extensions
|
||||
# However, there needs to be something to apply the correlations to in the
|
||||
# standard case, and this is in line with the way Spiff works otherwise
|
||||
event.payload = deepcopy(my_task.data)
|
||||
correlations = self.get_correlations(my_task.workflow.script_engine, event.payload)
|
||||
my_task.workflow.correlations.update(correlations)
|
||||
self._throw(event, my_task.workflow, my_task.workflow.outer_workflow, correlations)
|
||||
|
||||
def update_internal_data(self, my_task, event_definition):
|
||||
my_task.internal_data[event_definition.name] = event_definition.payload
|
||||
|
||||
def update_task_data(self, my_task):
|
||||
# I've added this method so that different message implementations can handle
|
||||
# copying their message data into the task
|
||||
payload = my_task.internal_data.get(self.name)
|
||||
if payload is not None:
|
||||
my_task.set_data(**payload)
|
||||
|
||||
def get_correlations(self, script_engine, payload):
|
||||
correlations = {}
|
||||
for property in self.correlation_properties:
|
||||
for key in property.correlation_keys:
|
||||
if key not in correlations:
|
||||
correlations[key] = {}
|
||||
correlations[key][property.name] = script_engine._evaluate(property.expression, payload)
|
||||
return correlations
|
||||
|
||||
|
||||
class NoneEventDefinition(EventDefinition):
|
||||
"""
|
||||
This class defines behavior for NoneEvents. We override throw to do nothing.
|
||||
"""
|
||||
def __init__(self):
|
||||
self.internal, self.external = False, False
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Default'
|
||||
|
||||
def throw(self, my_task):
|
||||
"""It's a 'none' event, so nothing to throw."""
|
||||
pass
|
||||
|
||||
def reset(self, my_task):
|
||||
"""It's a 'none' event, so nothing to reset."""
|
||||
pass
|
||||
|
||||
|
||||
class SignalEventDefinition(NamedEventDefinition):
|
||||
"""The SignalEventDefinition is the implementation of event definition used for Signal Events."""
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Signal'
|
||||
|
||||
class TerminateEventDefinition(EventDefinition):
|
||||
"""The TerminateEventDefinition is the implementation of event definition used for Termination Events."""
|
||||
|
||||
def __init__(self):
|
||||
super(TerminateEventDefinition, self).__init__()
|
||||
self.external = False
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Terminate'
|
||||
|
||||
class TimerEventDefinition(EventDefinition):
|
||||
"""
|
||||
The TimerEventDefinition is the implementation of event definition used for
|
||||
Catching Timer Events (Timer events aren't thrown).
|
||||
"""
|
||||
|
||||
def __init__(self, label, dateTime):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param label: The label of the event. Used for the description.
|
||||
|
||||
:param dateTime: The dateTime expression for the expiry time. This is
|
||||
passed to the Script Engine and must evaluate to a datetime (in the case of
|
||||
a time-date event) or a timedelta (in the case of a duration event).
|
||||
"""
|
||||
super(TimerEventDefinition, self).__init__()
|
||||
self.label = label
|
||||
self.dateTime = dateTime
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Timer'
|
||||
|
||||
def has_fired(self, my_task):
|
||||
"""
|
||||
The Timer is considered to have fired if the evaluated dateTime
|
||||
expression is before datetime.datetime.now()
|
||||
"""
|
||||
dt = my_task.workflow.script_engine.evaluate(my_task, self.dateTime)
|
||||
if isinstance(dt,datetime.timedelta):
|
||||
if my_task._get_internal_data('start_time',None) is not None:
|
||||
start_time = datetime.datetime.strptime(my_task._get_internal_data('start_time',None), self.TIME_FORMAT)
|
||||
elapsed = datetime.datetime.now() - start_time
|
||||
return elapsed > dt
|
||||
else:
|
||||
my_task.internal_data['start_time'] = datetime.datetime.now().strftime(self.TIME_FORMAT)
|
||||
return False
|
||||
|
||||
if dt is None:
|
||||
return False
|
||||
if isinstance(dt, datetime.datetime):
|
||||
if dt.tzinfo:
|
||||
tz = dt.tzinfo
|
||||
now = tz.fromutc(datetime.datetime.utcnow().replace(tzinfo=tz))
|
||||
else:
|
||||
now = datetime.datetime.now()
|
||||
else:
|
||||
# assume type is a date, not datetime
|
||||
now = datetime.date.today()
|
||||
return now > dt
|
||||
|
||||
def serialize(self):
|
||||
retdict = super(TimerEventDefinition, self).serialize()
|
||||
retdict['label'] = self.label
|
||||
retdict['dateTime'] = self.dateTime
|
||||
return retdict
|
||||
|
||||
|
||||
class CycleTimerEventDefinition(EventDefinition):
|
||||
"""
|
||||
The CycleTimerEventDefinition is the implementation of event definition used for
Catching Cycle Timer Events (Timer events aren't thrown).
|
||||
|
||||
The cycle definition should evaluate to a tuple of
|
||||
(n repetitions, repetition duration)
|
||||
"""
|
||||
def __init__(self, label, cycle_definition):
|
||||
|
||||
super(CycleTimerEventDefinition, self).__init__()
|
||||
self.label = label
|
||||
# The way we're using cycle timers doesn't really align with how the BPMN spec
# describes it (the example of "every Monday at 9am").
# I am not sure why this isn't a subprocess with a repeat count that starts
# with a duration timer
|
||||
self.cycle_definition = cycle_definition
|
||||
|
||||
@property
|
||||
def event_type(self):
|
||||
return 'Cycle Timer'
|
||||
|
||||
def has_fired(self, my_task):
|
||||
# We will fire this timer whenever a cycle completes
|
||||
# The task itself will manage counting how many times it fires
|
||||
|
||||
repeat, delta = my_task.workflow.script_engine.evaluate(my_task, self.cycle_definition)
|
||||
|
||||
# This is the first time we've entered this event
|
||||
if my_task.internal_data.get('repeat') is None:
|
||||
my_task.internal_data['repeat'] = repeat
|
||||
if my_task.get_data('repeat_count') is None:
|
||||
# This is now a looping task, and if we use internal data, the repeat count won't persist
|
||||
my_task.set_data(repeat_count=0)
|
||||
|
||||
now = datetime.datetime.now()
|
||||
if my_task._get_internal_data('start_time') is None:
|
||||
start_time = now
|
||||
my_task.internal_data['start_time'] = now.strftime(self.TIME_FORMAT)
|
||||
else:
|
||||
start_time = datetime.datetime.strptime(my_task._get_internal_data('start_time'),self.TIME_FORMAT)
|
||||
|
||||
if my_task.get_data('repeat_count') >= repeat or (now - start_time) < delta:
|
||||
return False
|
||||
return True
|
||||
|
||||
def reset(self, my_task):
|
||||
repeat_count = my_task.get_data('repeat_count')
|
||||
if repeat_count is None:
|
||||
# If this is a boundary event, then repeat count will not have been set
|
||||
my_task.set_data(repeat_count=0)
|
||||
else:
|
||||
my_task.set_data(repeat_count=repeat_count + 1)
|
||||
my_task.internal_data['start_time'] = None
|
||||
super(CycleTimerEventDefinition, self).reset(my_task)
|
||||
|
||||
def serialize(self):
|
||||
retdict = super(CycleTimerEventDefinition, self).serialize()
|
||||
retdict['label'] = self.label
|
||||
retdict['cycle_definition'] = self.cycle_definition
|
||||
return retdict
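
To make the timer variants above concrete, here is a hedged sketch of how the definitions might be
constructed. The expressions are plain strings because they are evaluated later by the workflow's
script engine; the exact values are illustrative only:

# Absolute timer: the expression must evaluate to a datetime (or date).
deadline = TimerEventDefinition('deadline', "datetime.datetime(2030, 1, 1, 9, 0)")
# Relative timer: the expression must evaluate to a timedelta.
delay = TimerEventDefinition('delay', "datetime.timedelta(minutes=5)")
# Cycle timer: the expression must evaluate to (repetitions, duration per repetition).
heartbeat = CycleTimerEventDefinition('heartbeat', "(3, datetime.timedelta(hours=1))")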
|
|
@ -0,0 +1,104 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from .event_definitions import MessageEventDefinition, NoneEventDefinition
|
||||
from ..BpmnSpecMixin import BpmnSpecMixin
|
||||
from ....specs.Simple import Simple
|
||||
from ....task import TaskState
|
||||
|
||||
class CatchingEvent(Simple, BpmnSpecMixin):
|
||||
"""Base Task Spec for Catching Event nodes."""
|
||||
|
||||
def __init__(self, wf_spec, name, event_definition, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param event_definition: the EventDefinition that we must wait for.
|
||||
"""
|
||||
super(CatchingEvent, self).__init__(wf_spec, name, **kwargs)
|
||||
self.event_definition = event_definition
|
||||
|
||||
def catches(self, my_task, event_definition, correlations=None):
|
||||
if self.event_definition == event_definition:
|
||||
return all([ correlations.get(key) == my_task.workflow.correlations.get(key) for key in correlations ])
|
||||
else:
|
||||
return False
|
||||
|
||||
def catch(self, my_task, event_definition):
|
||||
"""
|
||||
Catch is called by the workflow when the task has matched an event
|
||||
definition, at which point we can update our task's state.
|
||||
"""
|
||||
self.event_definition.catch(my_task, event_definition)
|
||||
self._update_hook(my_task)
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
if my_task.state == TaskState.WAITING and self.event_definition.has_fired(my_task):
|
||||
my_task._ready()
|
||||
super(CatchingEvent, self)._update_hook(my_task)
|
||||
|
||||
def _on_ready(self, my_task):
|
||||
|
||||
# None events don't propagate, so as soon as we're ready, we fire our event
|
||||
if isinstance(self.event_definition, NoneEventDefinition):
|
||||
my_task._set_internal_data(event_fired=True)
|
||||
|
||||
# If we have not seen the event we're waiting for, enter the waiting state
|
||||
if not self.event_definition.has_fired(my_task):
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
super(CatchingEvent, self)._on_ready(my_task)
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
|
||||
if isinstance(self.event_definition, MessageEventDefinition):
|
||||
self.event_definition.update_task_data(my_task)
|
||||
self.event_definition.reset(my_task)
|
||||
super(CatchingEvent, self)._on_complete_hook(my_task)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_generic_event(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic_event(wf_spec, s_state, cls)
|
||||
|
||||
|
||||
class ThrowingEvent(Simple, BpmnSpecMixin):
|
||||
"""Base Task Spec for Throwing Event nodes."""
|
||||
|
||||
def __init__(self, wf_spec, name, event_definition, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param event_definition: the EventDefinition to be thrown.
|
||||
"""
|
||||
super(ThrowingEvent, self).__init__(wf_spec, name, **kwargs)
|
||||
self.event_definition = event_definition
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
super(ThrowingEvent, self)._on_complete_hook(my_task)
|
||||
self.event_definition.throw(my_task)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_generic_event(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_generic_event(wf_spec, s_state, cls)
|
|
@ -0,0 +1,265 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2012 Matthew Hampton
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
|
||||
from .PythonScriptEngine import PythonScriptEngine
|
||||
from .specs.events.event_types import CatchingEvent
|
||||
from .specs.events.StartEvent import StartEvent
|
||||
from .specs.SubWorkflowTask import CallActivity
|
||||
from ..task import TaskState, Task
|
||||
from ..workflow import Workflow
|
||||
from ..exceptions import WorkflowException
|
||||
|
||||
|
||||
class BpmnMessage:
|
||||
|
||||
def __init__(self, correlations, name, payload):
|
||||
|
||||
self.correlations = correlations or {}
|
||||
self.name = name
|
||||
self.payload = payload
|
||||
|
||||
|
||||
class BpmnWorkflow(Workflow):
|
||||
"""
|
||||
The engine that executes a BPMN workflow. This specialises the standard
|
||||
Spiff Workflow class with a few extra methods and attributes.
|
||||
"""
|
||||
|
||||
def __init__(self, top_level_spec, subprocess_specs=None, name=None, script_engine=None,
|
||||
read_only=False, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param script_engine: set to an extension of PythonScriptEngine if you
|
||||
need a specialised version. Defaults to the script engine of the top
|
||||
most workflow, or to the PythonScriptEngine if none is provided.
|
||||
|
||||
:param read_only: If this parameter is set then the workflow state
|
||||
cannot change. It can only be queried to find out about the current
|
||||
state. This is used in conjunction with the CompactWorkflowSerializer
|
||||
to provide read only access to a previously saved workflow.
|
||||
"""
|
||||
self._busy_with_restore = False
|
||||
super(BpmnWorkflow, self).__init__(top_level_spec, **kwargs)
|
||||
self.name = name or top_level_spec.name
|
||||
self.subprocess_specs = subprocess_specs or {}
|
||||
self.subprocesses = {}
|
||||
self.bpmn_messages = []
|
||||
self.correlations = {}
|
||||
self.__script_engine = script_engine or PythonScriptEngine()
|
||||
self.read_only = read_only
|
||||
|
||||
@property
|
||||
def script_engine(self):
|
||||
# The outermost script engine always takes precedence.
|
||||
# All call activities, sub-workflows and DMNs should use the
|
||||
# workflow engine of the outermost workflow.
|
||||
return self._get_outermost_workflow().__script_engine
|
||||
|
||||
@script_engine.setter
|
||||
def script_engine(self, engine):
|
||||
self.__script_engine = engine
|
||||
|
||||
def create_subprocess(self, my_task, spec_name, name):
|
||||
|
||||
workflow = self._get_outermost_workflow(my_task)
|
||||
subprocess = BpmnWorkflow(
|
||||
workflow.subprocess_specs[spec_name], name=name,
|
||||
read_only=self.read_only,
|
||||
script_engine=self.script_engine,
|
||||
parent=my_task.workflow)
|
||||
workflow.subprocesses[my_task.id] = subprocess
|
||||
return subprocess
|
||||
|
||||
def delete_subprocess(self, my_task):
|
||||
workflow = self._get_outermost_workflow(my_task)
|
||||
del workflow.subprocesses[my_task.id]
|
||||
|
||||
def get_subprocess(self, my_task):
|
||||
workflow = self._get_outermost_workflow(my_task)
|
||||
return workflow.subprocesses.get(my_task.id)
|
||||
|
||||
def add_subprocess(self, spec_name, name):
|
||||
|
||||
new = CallActivity(self.spec, name, spec_name)
|
||||
self.spec.start.connect(new)
|
||||
task = Task(self, new)
|
||||
task._ready()
|
||||
start = self.get_tasks_from_spec_name('Start', workflow=self)[0]
|
||||
start.children.append(task)
|
||||
task.parent = start
|
||||
return self.subprocesses[task.id]
|
||||
|
||||
def _get_outermost_workflow(self, task=None):
|
||||
workflow = task.workflow if task is not None else self
|
||||
while workflow != workflow.outer_workflow:
|
||||
workflow = workflow.outer_workflow
|
||||
return workflow
|
||||
|
||||
def catch(self, event_definition, correlations=None):
|
||||
"""
|
||||
Send an event definition to any tasks that catch it.
|
||||
|
||||
Tasks can always catch events, regardless of their state. The
|
||||
event information is stored in the task's internal data and processed
|
||||
when the task is reached in the workflow. If a task should only
|
||||
receive messages while it is running (e.g. a boundary event), the task
|
||||
should call the event_definition's reset method before executing to
|
||||
clear out a stale message.
|
||||
|
||||
:param event_definition: the thrown event
|
||||
"""
|
||||
assert not self.read_only and not self._is_busy_with_restore()
|
||||
|
||||
# Start a subprocess for known specs with start events that catch this
|
||||
# This is totally hypocritical of me given how I've argued that specs should
|
||||
# be immutable, but I see no other way of doing this.
|
||||
for name, spec in self.subprocess_specs.items():
|
||||
for task_spec in list(spec.task_specs.values()):
|
||||
if isinstance(task_spec, StartEvent) and task_spec.event_definition == event_definition:
|
||||
subprocess = self.add_subprocess(spec.name, f'{spec.name}_{len(self.subprocesses)}')
|
||||
subprocess.correlations = correlations or {}
|
||||
start = self.get_tasks_from_spec_name(task_spec.name, workflow=subprocess)[0]
|
||||
task_spec.event_definition.catch(start, event_definition)
|
||||
|
||||
# We need to get all the tasks that catch an event before completing any of them
|
||||
# in order to prevent the scenario where multiple boundary events catch the
|
||||
# same event and the first executed cancels the rest
|
||||
tasks = [ t for t in self.get_catching_tasks() if t.task_spec.catches(t, event_definition, correlations or {}) ]
|
||||
for task in tasks:
|
||||
task.task_spec.catch(task, event_definition)
|
||||
|
||||
# Figure out if we need to create an external message
|
||||
if len(tasks) == 0 and isinstance(event_definition, MessageEventDefinition):
|
||||
self.bpmn_messages.append(
|
||||
BpmnMessage(correlations, event_definition.name, event_definition.payload))
|
||||
|
||||
def get_bpmn_messages(self):
|
||||
messages = self.bpmn_messages
|
||||
self.bpmn_messages = []
|
||||
return messages
|
||||
|
||||
def catch_bpmn_message(self, name, payload, correlations=None):
|
||||
event_definition = MessageEventDefinition(name)
|
||||
event_definition.payload = payload
|
||||
self.catch(event_definition, correlations=correlations)
|
||||
|
||||
def do_engine_steps(self, exit_at = None):
|
||||
"""
|
||||
Execute any READY tasks that are engine specific (for example, gateways
|
||||
or script tasks). This is done in a loop, so it will keep completing
|
||||
those tasks until there are only READY User tasks, or WAITING tasks
|
||||
left.
|
||||
|
||||
:param exit_at: After executing a task with a name matching this param, return the task object
|
||||
"""
|
||||
assert not self.read_only
|
||||
engine_steps = list(
|
||||
[t for t in self.get_tasks(TaskState.READY)
|
||||
if self._is_engine_task(t.task_spec)])
|
||||
while engine_steps:
|
||||
for task in engine_steps:
|
||||
task.complete()
|
||||
if task.task_spec.name == exit_at:
|
||||
return task
|
||||
engine_steps = list(
|
||||
[t for t in self.get_tasks(TaskState.READY)
|
||||
if self._is_engine_task(t.task_spec)])
|
||||
|
||||
def refresh_waiting_tasks(self):
|
||||
"""
|
||||
Refresh the state of all WAITING tasks. This will, for example, update
|
||||
Catching Timer Events whose waiting time has passed.
|
||||
"""
|
||||
assert not self.read_only
|
||||
for my_task in self.get_tasks(TaskState.WAITING):
|
||||
my_task.task_spec._update(my_task)
|
||||
|
||||
def get_tasks_from_spec_name(self, name, workflow=None):
|
||||
return [t for t in self.get_tasks(workflow=workflow) if t.task_spec.name == name]
|
||||
|
||||
def get_tasks(self, state=TaskState.ANY_MASK, workflow=None):
|
||||
tasks = []
|
||||
top = self._get_outermost_workflow()
|
||||
wf = workflow or top
|
||||
for task in Workflow.get_tasks(wf):
|
||||
subprocess = top.subprocesses.get(task.id)
|
||||
if subprocess is not None:
|
||||
tasks.extend(subprocess.get_tasks(state, subprocess))
|
||||
if task._has_state(state):
|
||||
tasks.append(task)
|
||||
return tasks
|
||||
|
||||
def _find_task(self, task_id):
|
||||
if task_id is None:
|
||||
raise WorkflowException(self.spec, 'task_id is None')
|
||||
for task in self.get_tasks():
|
||||
if task.id == task_id:
|
||||
return task
|
||||
raise WorkflowException(self.spec,
|
||||
f'A task with the given task_id ({task_id}) was not found')
|
||||
|
||||
def complete_task_from_id(self, task_id):
|
||||
# I don't even know why we use this stupid function instead of calling task.complete,
|
||||
# since all it does is search the task tree and call the method
|
||||
task = self._find_task(task_id)
|
||||
return task.complete()
|
||||
|
||||
def reset_task_from_id(self, task_id):
|
||||
task = self._find_task(task_id)
|
||||
if task.workflow.last_task and task.workflow.last_task.data:
|
||||
data = task.workflow.last_task.data
|
||||
return task.reset_token(data)
|
||||
|
||||
def get_ready_user_tasks(self,lane=None):
|
||||
"""
|
||||
Returns a list of User Tasks that are READY for user action
|
||||
"""
|
||||
if lane is not None:
|
||||
return [t for t in self.get_tasks(TaskState.READY)
|
||||
if (not self._is_engine_task(t.task_spec))
|
||||
and (t.task_spec.lane == lane)]
|
||||
else:
|
||||
return [t for t in self.get_tasks(TaskState.READY)
|
||||
if not self._is_engine_task(t.task_spec)]
|
||||
|
||||
def get_waiting_tasks(self):
|
||||
"""
|
||||
Returns a list of all WAITING tasks
|
||||
"""
|
||||
return self.get_tasks(TaskState.WAITING)
|
||||
|
||||
def get_catching_tasks(self):
|
||||
return [ task for task in self.get_tasks() if isinstance(task.task_spec, CatchingEvent) ]
|
||||
|
||||
def _is_busy_with_restore(self):
|
||||
if self.outer_workflow == self:
|
||||
return self._busy_with_restore
|
||||
return self.outer_workflow._is_busy_with_restore()
|
||||
|
||||
def _is_engine_task(self, task_spec):
|
||||
return (not hasattr(task_spec, 'is_engine_task') or
|
||||
task_spec.is_engine_task())
|
||||
|
||||
def _task_completed_notify(self, task):
|
||||
assert (not self.read_only) or self._is_busy_with_restore()
|
||||
super(BpmnWorkflow, self)._task_completed_notify(task)
|
||||
|
||||
def _task_cancelled_notify(self, task):
|
||||
assert (not self.read_only) or self._is_busy_with_restore()
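
As a usage illustration for the API defined in this class, here is a hedged sketch of a typical run
loop. `spec` is assumed to come from one of the parsers elsewhere in this tree, and `is_completed()`
is assumed to be inherited from the base Workflow class:

workflow = BpmnWorkflow(spec)
workflow.do_engine_steps()                 # run gateways, script tasks, etc.
while not workflow.is_completed():
    for task in workflow.get_ready_user_tasks():
        task.data['answer'] = 42           # fill in whatever the task needs (illustrative)
        task.complete()
    workflow.refresh_waiting_tasks()       # let timers and catching events re-check
    workflow.do_engine_steps()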
|
|
@ -0,0 +1,9 @@
|
|||
# BPMN Parser for the Camunda Modeler
|
||||
|
||||
This package provides support for parsing BPMN diagrams provided by Camunda.
It is a thin layer on top of the SpiffWorkflow BPMN package, with additional
tools for handling User Forms.
|
||||
|
||||
The [Camunda Modeler](https://camunda.com/download/modeler/) is a feature-rich
|
||||
open source visual editor for authoring and modifying BPMN Diagrams.
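
A minimal usage sketch (the module paths and parser methods below are assumed
from the wider SpiffWorkflow BPMN package, which is not shown in this README):

    from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
    from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

    parser = CamundaParser()
    parser.add_bpmn_file('diagram.bpmn')
    spec = parser.get_spec('my_process_id')
    workflow = BpmnWorkflow(spec)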
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
from ..specs.UserTask import UserTask
|
||||
from ..parser.UserTaskParser import UserTaskParser
|
||||
from ...bpmn.parser.BpmnParser import full_tag
|
||||
|
||||
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
|
||||
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
from SpiffWorkflow.camunda.parser.business_rule_task import BusinessRuleTaskParser
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.events import EndEvent, IntermediateThrowEvent, StartEvent, IntermediateCatchEvent, BoundaryEvent
|
||||
from .event_parsers import CamundaStartEventParser, CamundaEndEventParser, \
|
||||
CamundaIntermediateCatchEventParser, CamundaIntermediateThrowEventParser, CamundaBoundaryEventParser
|
||||
|
||||
|
||||
class CamundaParser(BpmnDmnParser):
|
||||
|
||||
OVERRIDE_PARSER_CLASSES = {
|
||||
full_tag('userTask'): (UserTaskParser, UserTask),
|
||||
full_tag('startEvent'): (CamundaStartEventParser, StartEvent),
|
||||
full_tag('endEvent'): (CamundaEndEventParser, EndEvent),
|
||||
full_tag('intermediateCatchEvent'): (CamundaIntermediateCatchEventParser, IntermediateCatchEvent),
|
||||
full_tag('intermediateThrowEvent'): (CamundaIntermediateThrowEventParser, IntermediateThrowEvent),
|
||||
full_tag('boundaryEvent'): (CamundaBoundaryEventParser, BoundaryEvent),
|
||||
full_tag('businessRuleTask'): (BusinessRuleTaskParser, BusinessRuleTask),
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
from ...bpmn.parser.TaskParser import TaskParser
|
||||
from ...bpmn.parser.util import xpath_eval
|
||||
from ...camunda.specs.UserTask import Form, FormField, EnumFormField
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
|
||||
class UserTaskParser(TaskParser):
|
||||
"""
|
||||
Base class for parsing User Tasks
|
||||
"""
|
||||
|
||||
def __init__(self, process_parser, spec_class, node, lane=None):
|
||||
super(UserTaskParser, self).__init__(process_parser, spec_class, node, lane)
|
||||
self.xpath = xpath_eval(node, extra_ns={'camunda': CAMUNDA_MODEL_NS})
|
||||
|
||||
def create_task(self):
|
||||
form = self.get_form()
|
||||
return self.spec_class(self.spec, self.get_task_spec_name(), form,
|
||||
lane=self.lane,
|
||||
position=self.position,
|
||||
description=self.node.get('name', None))
|
||||
|
||||
def get_form(self):
|
||||
"""Camunda provides a simple form builder, this will extract the
|
||||
details from that form and construct a form model from it. """
|
||||
form = Form()
|
||||
try:
|
||||
form.key = self.node.attrib['{' + CAMUNDA_MODEL_NS + '}formKey']
|
||||
except KeyError:
|
||||
return form
|
||||
for xml_field in self.xpath('.//camunda:formData/camunda:formField'):
|
||||
if xml_field.get('type') == 'enum':
|
||||
field = self.get_enum_field(xml_field)
|
||||
else:
|
||||
field = FormField()
|
||||
|
||||
field.id = xml_field.get('id')
|
||||
field.type = xml_field.get('type')
|
||||
field.label = xml_field.get('label')
|
||||
field.default_value = xml_field.get('defaultValue')
|
||||
|
||||
for child in xml_field:
|
||||
if child.tag == '{' + CAMUNDA_MODEL_NS + '}properties':
|
||||
for p in child:
|
||||
field.add_property(p.get('id'), p.get('value'))
|
||||
|
||||
if child.tag == '{' + CAMUNDA_MODEL_NS + '}validation':
|
||||
for v in child:
|
||||
field.add_validation(v.get('name'), v.get('config'))
|
||||
|
||||
form.add_field(field)
|
||||
return form
|
||||
|
||||
def get_enum_field(self, xml_field):
|
||||
field = EnumFormField()
|
||||
|
||||
for child in xml_field:
|
||||
if child.tag == '{' + CAMUNDA_MODEL_NS + '}value':
|
||||
field.add_option(child.get('id'), child.get('name'))
|
||||
return field
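
To show what get_form builds, here is an illustrative sketch using the form classes imported above;
all ids and labels are made up:

form = Form()
form.key = 'my_form'                       # from the camunda:formKey attribute
field = EnumFormField()                    # a camunda:formField with type="enum"
field.id = 'color'
field.label = 'Colour'
field.add_option('red', 'Red')
field.add_option('blue', 'Blue')
form.add_field(field)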
|
|
@ -0,0 +1,38 @@
|
|||
from SpiffWorkflow.bpmn.parser.util import xpath_eval
|
||||
from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser
|
||||
|
||||
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
|
||||
class BusinessRuleTaskParser(TaskParser):
|
||||
dmn_debug = None
|
||||
|
||||
def __init__(self, process_parser, spec_class, node, lane=None):
|
||||
super(BusinessRuleTaskParser, self).__init__(process_parser, spec_class, node, lane)
|
||||
self.xpath = xpath_eval(self.node, extra_ns={'camunda': CAMUNDA_MODEL_NS})
|
||||
|
||||
def create_task(self):
|
||||
decision_ref = self.get_decision_ref(self.node)
|
||||
return BusinessRuleTask(self.spec, self.get_task_spec_name(),
|
||||
dmnEngine=self.process_parser.parser.get_engine(decision_ref, self.node),
|
||||
lane=self.lane, position=self.position,
|
||||
description=self.node.get('name', None),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_decision_ref(node):
|
||||
return node.attrib['{' + CAMUNDA_MODEL_NS + '}decisionRef']
|
||||
|
||||
|
||||
def _on_trigger(self, my_task):
|
||||
pass
|
||||
|
||||
def serialize(self, serializer, **kwargs):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state, **kwargs):
|
||||
pass
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
from SpiffWorkflow.bpmn.parser.event_parsers import EventDefinitionParser
|
||||
from SpiffWorkflow.bpmn.parser.event_parsers import StartEventParser, EndEventParser, \
|
||||
IntermediateCatchEventParser, IntermediateThrowEventParser, BoundaryEventParser
|
||||
from SpiffWorkflow.camunda.specs.events.event_definitions import MessageEventDefinition
|
||||
from SpiffWorkflow.bpmn.parser.util import one
|
||||
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
|
||||
class CamundaEventDefinitionParser(EventDefinitionParser):
|
||||
|
||||
def parse_message_event(self, message_event):
|
||||
"""Parse a Camunda message event node."""
|
||||
|
||||
message_ref = message_event.get('messageRef')
|
||||
if message_ref:
|
||||
message = one(self.doc_xpath('.//bpmn:message[@id="%s"]' % message_ref))
|
||||
name = message.get('name')
|
||||
correlations = self.get_message_correlations(message_ref)
|
||||
else:
|
||||
name = message_event.getparent().get('name')
|
||||
correlations = {}
|
||||
|
||||
payload = message_event.attrib.get('{' + CAMUNDA_MODEL_NS + '}expression')
|
||||
result_var = message_event.attrib.get('{' + CAMUNDA_MODEL_NS + '}resultVariable')
|
||||
return MessageEventDefinition(name, correlations, payload, result_var)
|
||||
|
||||
|
||||
# This really sucks, but it's still better than copy-pasting a bunch of code a million times
|
||||
# The parser "design" makes it impossible to do anything sensible or intuitive here
|
||||
|
||||
class CamundaStartEventParser(CamundaEventDefinitionParser, StartEventParser):
|
||||
def create_task(self):
|
||||
return StartEventParser.create_task(self)
|
||||
|
||||
class CamundaEndEventParser(CamundaEventDefinitionParser, EndEventParser):
|
||||
def create_task(self):
|
||||
return EndEventParser.create_task(self)
|
||||
|
||||
class CamundaIntermediateCatchEventParser(CamundaEventDefinitionParser, IntermediateCatchEventParser):
|
||||
def create_task(self):
|
||||
return IntermediateCatchEventParser.create_task(self)
|
||||
|
||||
class CamundaIntermediateThrowEventParser(CamundaEventDefinitionParser, IntermediateThrowEventParser):
|
||||
def create_task(self):
|
||||
return IntermediateThrowEventParser.create_task(self)
|
||||
|
||||
class CamundaBoundaryEventParser(CamundaEventDefinitionParser, BoundaryEventParser):
|
||||
def create_task(self):
|
||||
return BoundaryEventParser.create_task(self)
|
|
@ -0,0 +1,2 @@
|
|||
from .task_spec_converters import UserTaskConverter, StartEventConverter, EndEventConverter, \
|
||||
BoundaryEventConverter, IntermediateCatchEventConverter, IntermediateThrowEventConverter
|
|
@ -0,0 +1,88 @@
|
|||
from functools import partial
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.events import EndEvent, IntermediateThrowEvent, StartEvent, IntermediateCatchEvent, BoundaryEvent
|
||||
from ..specs.events.event_definitions import MessageEventDefinition
|
||||
from ...bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
|
||||
|
||||
from ..specs.UserTask import UserTask, Form
|
||||
|
||||
class CamundaEventConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, spec_class, data_converter, typename):
|
||||
super().__init__(spec_class, data_converter, typename)
|
||||
self.register(
|
||||
MessageEventDefinition,
|
||||
self.event_definition_to_dict,
|
||||
partial(self.event_defintion_from_dict, MessageEventDefinition)
|
||||
)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
if isinstance(spec, BoundaryEvent):
|
||||
dct['cancel_activity'] = spec.cancel_activity
|
||||
dct['event_definition'] = self.convert(spec.event_definition)
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
dct['event_definition'] = self.restore(dct['event_definition'])
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
def event_definition_to_dict(self, event_definition):
|
||||
dct = super().event_definition_to_dict(event_definition)
|
||||
if isinstance(event_definition, MessageEventDefinition):
|
||||
dct['payload'] = event_definition.payload
|
||||
dct['result_var'] = event_definition.result_var
|
||||
return dct
|
||||
|
||||
|
||||
class StartEventConverter(CamundaEventConverter):
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(StartEvent, data_converter, typename)
|
||||
|
||||
class EndEventConverter(CamundaEventConverter):
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(EndEvent, data_converter, typename)
|
||||
|
||||
class BoundaryEventConverter(CamundaEventConverter):
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(BoundaryEvent, data_converter, typename)
|
||||
|
||||
class IntermediateCatchEventConverter(CamundaEventConverter):
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(IntermediateCatchEvent, data_converter, typename)
|
||||
|
||||
class IntermediateThrowEventConverter(CamundaEventConverter):
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(IntermediateThrowEvent, data_converter, typename)
|
||||
|
||||
class UserTaskConverter(CamundaEventConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(UserTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct['form'] = self.form_to_dict(spec.form)
|
||||
return dct
|
||||
|
||||
def from_dict(self, dct):
|
||||
dct['form'] = Form(init=dct['form'])
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
def form_to_dict(self, form):
|
||||
dct = {'key': form.key, 'fields': []}
|
||||
for field in form.fields:
|
||||
new = {
|
||||
'id': field.id,
|
||||
'default_value': field.default_value,
|
||||
'label': field.label,
|
||||
'type': field.type,
|
||||
'properties': [ prop.__dict__ for prop in field.properties ],
|
||||
'validation': [ val.__dict__ for val in field.validation ],
|
||||
}
|
||||
if field.type == "enum":
|
||||
new['options'] = [ opt.__dict__ for opt in field.options ]
|
||||
dct['fields'].append(new)
|
||||
return dct
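# Illustrative shape of the resulting dict (hypothetical field values):
# {'key': 'my_form', 'fields': [{'id': 'first_name', 'default_value': '',
#  'label': 'First Name', 'type': 'string', 'properties': [], 'validation': []}]}
# from_dict above feeds this same structure back into Form(init=dct['form']).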
|
|
@ -0,0 +1,143 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
from ...bpmn.specs.UserTask import UserTask
|
||||
from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
|
||||
|
||||
|
||||
class UserTask(UserTask, BpmnSpecMixin):
|
||||
|
||||
def __init__(self, wf_spec, name, form, **kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
:param form: the information that needs to be provided by the user,
|
||||
as parsed from the Camunda XML file's form details.
|
||||
"""
|
||||
super(UserTask, self).__init__(wf_spec, name, **kwargs)
|
||||
self.form = form
|
||||
|
||||
|
||||
"""
|
||||
Task Spec for a bpmn:userTask node.
|
||||
"""
|
||||
|
||||
def _on_trigger(self, my_task):
|
||||
pass
|
||||
|
||||
def is_engine_task(self):
|
||||
return False
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_user_task(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_user_task(wf_spec, s_state)
|
||||
|
||||
|
||||
class FormField(object):
|
||||
def __init__(self, form_type="text"):
|
||||
self.id = ""
|
||||
self.type = form_type
|
||||
self.label = ""
|
||||
self.default_value = ""
|
||||
self.properties = []
|
||||
self.validation = []
|
||||
|
||||
def add_property(self, property_id, value):
|
||||
self.properties.append(FormFieldProperty(property_id, value))
|
||||
|
||||
def add_validation(self, name, config):
|
||||
self.validation.append(FormFieldValidation(name, config))
|
||||
|
||||
def get_property(self, property_id):
|
||||
for prop in self.properties:
|
||||
if prop.id == property_id:
|
||||
return prop.value
|
||||
|
||||
def has_property(self, property_id):
|
||||
return self.get_property(property_id) is not None
|
||||
|
||||
def get_validation(self, name):
|
||||
for v in self.validation:
|
||||
if v.name == name:
|
||||
return v.config
|
||||
|
||||
def has_validation(self, name):
|
||||
return self.get_validation(name) is not None
|
||||
|
||||
def jsonable(self):
|
||||
return self.__dict__
|
||||
|
||||
class EnumFormField(FormField):
|
||||
def __init__(self):
|
||||
super(EnumFormField, self).__init__("enum")
|
||||
self.options = []
|
||||
|
||||
def add_option(self, option_id, name):
|
||||
self.options.append(EnumFormFieldOption(option_id, name))
|
||||
|
||||
def jsonable(self):
|
||||
return self.__dict__
|
||||
|
||||
|
||||
class EnumFormFieldOption:
|
||||
def __init__(self, option_id, name):
|
||||
self.id = option_id
|
||||
self.name = name
|
||||
|
||||
def jsonable(self):
|
||||
return self.__dict__
|
||||
|
||||
|
||||
class FormFieldProperty:
|
||||
def __init__(self, property_id, value):
|
||||
self.id = property_id
|
||||
self.value = value
|
||||
|
||||
def jsonable(self):
|
||||
return self.__dict__
|
||||
|
||||
|
||||
class FormFieldValidation:
|
||||
def __init__(self, name, config):
|
||||
self.name = name
|
||||
self.config = config
|
||||
|
||||
def jsonable(self):
|
||||
return self.__dict__
|
||||
|
||||
|
||||
class Form:
|
||||
def __init__(self,init=None):
|
||||
self.key = ""
|
||||
self.fields = []
|
||||
if init:
|
||||
self.from_dict(init)
|
||||
|
||||
def add_field(self, field):
|
||||
self.fields.append(field)
|
||||
|
||||
def jsonable(self):
|
||||
return self.__dict__
|
||||
|
||||
def from_dict(self,formdict):
|
||||
self.key = formdict['key']
|
||||
for field in formdict['fields']:
|
||||
if field['type'] == 'enum':
|
||||
newfield = EnumFormField()
|
||||
for option in field['options']:
|
||||
newfield.add_option(option['id'], option['name'])
|
||||
else:
|
||||
newfield = FormField()
|
||||
newfield.id = field['id']
|
||||
newfield.default_value = field['default_value']
|
||||
newfield.label = field['label']
|
||||
newfield.type = field['type']
|
||||
for prop in field['properties']:
|
||||
newfield.add_property(prop['id'],prop['value'])
|
||||
for validation in field['validation']:
|
||||
newfield.add_validation(validation['name'],validation['config'])
|
||||
self.add_field(newfield)
|
||||
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
|
||||
|
||||
class MessageEventDefinition(MessageEventDefinition):
|
||||
"""
|
||||
Message Events have both a name and a payload.
|
||||
"""
|
||||
|
||||
# It is not entirely clear how the payload is supposed to be handled, so I have
|
||||
# deviated from what the earlier code did as little as possible, but I believe
|
||||
# this should be revisited: for one thing, we're relying on some Camunda-specific
|
||||
# properties.
|
||||
|
||||
def __init__(self, name, correlation_properties=None, payload=None, result_var=None):
|
||||
|
||||
super(MessageEventDefinition, self).__init__(name, correlation_properties)
|
||||
self.payload = payload
|
||||
self.result_var = result_var
|
||||
|
||||
# The BPMN spec says that Messages should not be used within a process; however
|
||||
# our camunda workflows depend on it
|
||||
self.internal = True
|
||||
|
||||
def throw(self, my_task):
|
||||
# We need to evaluate the message payload in the context of this task
|
||||
result = my_task.workflow.script_engine.evaluate(my_task, self.payload)
|
||||
# We can't update our own payload, because if this task is reached again
|
||||
# we have to evaluate it again, so we create a new event instead
|
||||
event = MessageEventDefinition(self.name, payload=result, result_var=self.result_var)
|
||||
self._throw(event, my_task.workflow, my_task.workflow.outer_workflow)
|
||||
|
||||
def update_internal_data(self, my_task, event_definition):
|
||||
if event_definition.result_var is None:
|
||||
result_var = f'{my_task.task_spec.name}_Response'
|
||||
else:
|
||||
result_var = event_definition.result_var
|
||||
# Store the payload under the event name to prevent it from conflicting with other events' data
|
||||
my_task.internal_data[self.name] = {
|
||||
'payload': event_definition.payload,
|
||||
'result_var': result_var
|
||||
}
|
||||
|
||||
def update_task_data(self, my_task):
|
||||
event_data = my_task.internal_data.get(self.name)
|
||||
my_task.data[event_data['result_var']] = event_data['payload']
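# Illustrative example (hypothetical task name): a message caught by a task named
# 'Order_Received' with no result_var configured stores its evaluated payload in
# my_task.data['Order_Received_Response'], per the default chosen in
# update_internal_data above.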
|
||||
|
||||
def reset(self, my_task):
|
||||
my_task.internal_data.pop('result_var', None)
|
||||
super(MessageEventDefinition, self).reset(my_task)
|
||||
|
||||
def serialize(self):
|
||||
retdict = super().serialize()
|
||||
retdict['payload'] = self.payload
|
||||
retdict['result_var'] = self.result_var
|
||||
return retdict
|
|
@ -0,0 +1,115 @@
|
|||
import logging
|
||||
import re
|
||||
|
||||
from ...util import levenshtein
|
||||
from ...workflow import WorkflowException
|
||||
|
||||
logger = logging.getLogger('spiff.dmn')
|
||||
|
||||
|
||||
class DMNEngine:
|
||||
"""
|
||||
Handles the processing of a decision table.
|
||||
"""
|
||||
|
||||
def __init__(self, decision_table):
|
||||
self.decision_table = decision_table
|
||||
|
||||
def decide(self, task):
|
||||
for rule in self.decision_table.rules:
|
||||
if self.__check_rule(rule, task):
|
||||
return rule
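# Note: rules are checked in the order they appear in the decision table and the
# first rule whose inputs all match is returned (a "first hit" policy); if no
# rule matches, decide implicitly returns None.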
|
||||
|
||||
def __check_rule(self, rule, task):
|
||||
for input_entry in rule.inputEntries:
|
||||
for lhs in input_entry.lhs:
|
||||
if lhs is not None:
|
||||
input_val = DMNEngine.__get_input_val(input_entry, task.data)
|
||||
else:
|
||||
input_val = None
|
||||
try:
|
||||
if not self.evaluate(input_val, lhs, task):
|
||||
return False
|
||||
except NameError as e:
|
||||
# Add a bit of info and re-raise as a NameError
|
||||
raise NameError(str(e) + " Failed to execute "
|
||||
"expression: '%s' is '%s' in the "
|
||||
"Row with annotation '%s'" % (input_val, lhs, rule.description))
|
||||
except WorkflowException as we:
|
||||
raise we
|
||||
except Exception as e:
|
||||
raise Exception("Failed to execute "
|
||||
"expression: '%s' is '%s' in the "
|
||||
"Row with annotation '%s', %s" % (
|
||||
input_val, lhs, rule.description, str(e)))
|
||||
else:
|
||||
# Empty means ignore decision value
|
||||
continue # Check the other operators/columns
|
||||
return True
|
||||
|
||||
def needs_eq(self, script_engine, text):
|
||||
try:
|
||||
# this should work if we can just do a straight equality
|
||||
script_engine.validate(text)
|
||||
return True
|
||||
except SyntaxError:
|
||||
# if we have problems parsing, then we introduce a variable on the left hand side
|
||||
# and try that and see if that parses. If so, then we know that we do not need to
|
||||
# introduce an equality operator later in the dmn
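# Illustrative examples (hypothetical expressions): '> 3' fails to parse on its
# own but 'v > 3' does, so no '==' is added later; a plain '3' parses as-is, so
# evaluate() will build 'input == 3' instead.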
|
||||
script_engine.validate(f'v {text}')
|
||||
return False
|
||||
|
||||
def evaluate(self, input_expr, match_expr, task):
|
||||
"""
|
||||
Here we need to handle a few things such as if it is an equality or if
|
||||
the equality has already been taken care of. For now, we just assume
|
||||
it is equality.
|
||||
|
||||
An optional task can be included if this is being executed in the
|
||||
context of a BPMN task.
|
||||
"""
|
||||
if match_expr is None:
|
||||
return True
|
||||
|
||||
script_engine = task.workflow.script_engine
|
||||
# NB - the question mark allows us to do a double ended test - for
|
||||
# example - our input expr is 5 and the match expr is 4 < ? < 6 -
|
||||
# this should evaluate as 4 < 5 < 6 and it should evaluate as 'True'
|
||||
# NOTE: It should only do this replacement outside of quotes.
|
||||
# for example, provided "This thing?" in quotes, it should not
|
||||
# do the replacement.
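# Illustrative example (hypothetical values): with input_expr '5' and match_expr
# '4 < ? < 6', the substitution below yields '4 < dmninputexpr < 6', which is then
# evaluated with dmninputexpr bound to the evaluated input (i.e. 4 < 5 < 6 -> True).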
|
||||
match_expr = re.sub(r'(\?)(?=(?:[^\'"]|[\'"][^\'"]*[\'"])*$)', 'dmninputexpr', match_expr)
|
||||
if 'dmninputexpr' in match_expr:
|
||||
external_methods = {
|
||||
'dmninputexpr': script_engine.evaluate(task, input_expr)
|
||||
}
|
||||
return script_engine.evaluate(task, match_expr,
|
||||
external_methods=external_methods)
|
||||
|
||||
# The input expression just has to be something that can be parsed as is by the engine.
|
||||
try:
|
||||
script_engine.validate(input_expr)
|
||||
except Exception as e:
|
||||
raise WorkflowException(f"Input Expression '{input_expr}' is malformed. " + str(e))
|
||||
|
||||
# If we get here, we need to check whether the match expression includes
|
||||
# an operator or if can use '=='
|
||||
needs_eq = self.needs_eq(script_engine, match_expr)
|
||||
expr = input_expr + ' == ' + match_expr if needs_eq else input_expr + match_expr
|
||||
return script_engine.evaluate(task, expr)
|
||||
|
||||
@staticmethod
|
||||
def __get_input_val(input_entry, context):
|
||||
"""
|
||||
The input of the decision method should be an expression, but will
|
||||
fall back to the likely very bad idea of trying to use the label.
|
||||
|
||||
:param input_entry: the input entry being evaluated
|
||||
:param context: a dictionary that provides some context/local vars.
|
||||
:return:
|
||||
"""
|
||||
if input_entry.input.expression:
|
||||
return input_entry.input.expression
|
||||
else:
|
||||
# Backwards compatibility
|
||||
return "%r" % context[input_entry.input.label]
|
|
@ -0,0 +1,61 @@
|
|||
import glob
|
||||
|
||||
from ...bpmn.parser.util import xpath_eval
|
||||
from ...bpmn.parser.ValidationException import ValidationException
|
||||
|
||||
from ...bpmn.parser.BpmnParser import BpmnParser
|
||||
from ...dmn.parser.DMNParser import DMNParser
|
||||
from ..engine.DMNEngine import DMNEngine
|
||||
from lxml import etree
|
||||
|
||||
class BpmnDmnParser(BpmnParser):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.dmn_parsers = {}
|
||||
self.dmn_parsers_by_name = {}
|
||||
|
||||
def get_engine(self, decision_ref, node):
|
||||
if decision_ref not in self.dmn_parsers:
|
||||
options = ', '.join(list(self.dmn_parsers.keys()))
|
||||
raise ValidationException(
|
||||
'No DMN Diagram available with id "%s". Available DMN ids are: %s' % (decision_ref, options),
|
||||
node=node, filename='')
|
||||
dmn_parser = self.dmn_parsers[decision_ref]
|
||||
dmn_parser.parse()
|
||||
decision = dmn_parser.decision
|
||||
return DMNEngine(decision.decisionTables[0])
|
||||
|
||||
def add_dmn_xml(self, node, filename=None):
|
||||
"""
|
||||
Add the given lxml representation of the DMN file to the parser's set.
|
||||
"""
|
||||
xpath = xpath_eval(node)
|
||||
dmn_parser = DMNParser(
|
||||
self, node, filename=filename, doc_xpath=xpath)
|
||||
self.dmn_parsers[dmn_parser.get_id()] = dmn_parser
|
||||
self.dmn_parsers_by_name[dmn_parser.get_name()] = dmn_parser
|
||||
|
||||
def add_dmn_file(self, filename):
|
||||
"""
|
||||
Add the given DMN filename to the parser's set.
|
||||
"""
|
||||
self.add_dmn_files([filename])
|
||||
|
||||
def add_dmn_files_by_glob(self, g):
|
||||
"""
|
||||
Add all filenames matching the provided pattern (e.g. *.dmn) to the
|
||||
parser's set.
|
||||
"""
|
||||
self.add_dmn_files(glob.glob(g))
|
||||
|
||||
def add_dmn_files(self, filenames):
|
||||
"""
|
||||
Add all filenames in the given list to the parser's set.
|
||||
"""
|
||||
for filename in filenames:
|
||||
f = open(filename, 'r')
|
||||
try:
|
||||
self.add_dmn_xml(etree.parse(f).getroot(), filename=filename)
|
||||
finally:
|
||||
f.close()
|
|
@ -0,0 +1,190 @@
|
|||
import ast
|
||||
|
||||
from ...bpmn.parser.util import xpath_eval
|
||||
|
||||
from ...dmn.specs.model import Decision, DecisionTable, InputEntry, \
|
||||
OutputEntry, Input, Output, Rule
|
||||
|
||||
|
||||
def get_dmn_ns(node):
|
||||
"""
|
||||
Returns the namespace definition for the current DMN
|
||||
|
||||
:param node: the XML node for the DMN document
|
||||
"""
|
||||
if 'http://www.omg.org/spec/DMN/20151101/dmn.xsd' in node.nsmap.values():
|
||||
return 'http://www.omg.org/spec/DMN/20151101/dmn.xsd'
|
||||
elif 'https://www.omg.org/spec/DMN/20191111/MODEL/' in node.nsmap.values():
|
||||
return 'https://www.omg.org/spec/DMN/20191111/MODEL/'
|
||||
return None
|
||||
|
||||
|
||||
class DMNParser(object):
|
||||
"""
|
||||
Please note this DMN Parser still needs a lot of work. A few key areas
|
||||
that need to be addressed:
|
||||
1. it assumes that only one decision table exists within a decision
|
||||
2. it is not always name space aware (I fixed the top level, but could be
|
||||
cleaner all the way through).
|
||||
"""
|
||||
|
||||
DT_FORMAT = '%Y-%m-%dT%H:%M:%S'
|
||||
|
||||
def __init__(self, p, node, svg=None, filename=None, doc_xpath=None):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:param p: the owning BpmnParser instance
|
||||
:param node: the XML node for the DMN document
|
||||
:param svg: the SVG representation of this process as a string
|
||||
(optional)
|
||||
:param filename: the source DMN filename (optional)
|
||||
"""
|
||||
self.parser = p
|
||||
self.node = node
|
||||
self.decision = None
|
||||
self.svg = svg
|
||||
self.filename = filename
|
||||
self.doc_xpath = doc_xpath
|
||||
self.dmn_ns = get_dmn_ns(self.node)
|
||||
self.xpath = xpath_eval(self.node, {'dmn': self.dmn_ns})
|
||||
|
||||
def parse(self):
|
||||
self.decision = self._parse_decision(self.node.findall('{*}decision'))
|
||||
|
||||
def get_id(self):
|
||||
"""
|
||||
Returns the decision ID
|
||||
"""
|
||||
return self.node.findall('{*}decision[1]')[0].get('id')
|
||||
|
||||
def get_name(self):
|
||||
"""
|
||||
Returns the decision name
|
||||
"""
|
||||
return self.node.findall('{*}decision[1]')[0].get('name')
|
||||
|
||||
def _parse_decision(self, root):
|
||||
decision_elements = list(root)
|
||||
if len(decision_elements) == 0:
|
||||
raise Exception('No decisions found')
|
||||
|
||||
if len(decision_elements) > 1:
|
||||
raise Exception('Multiple decisions found')
|
||||
|
||||
decision_element = decision_elements[0]
|
||||
assert decision_element.tag.endswith(
|
||||
'decision'), 'Element %r is not of type "decision"' % (
|
||||
decision_element.tag)
|
||||
|
||||
decision = Decision(decision_element.attrib['id'],
|
||||
decision_element.attrib.get('name', ''))
|
||||
|
||||
# Parse decision tables
|
||||
try:
|
||||
self._parse_decision_tables(decision, decision_element)
|
||||
except Exception as e:
|
||||
raise Exception(
|
||||
"Error in Decision '%s': %s" % (decision.name, str(e)))
|
||||
|
||||
return decision
|
||||
|
||||
def _parse_decision_tables(self, decision, decisionElement):
|
||||
for decision_table_element in decisionElement.findall('{*}decisionTable'):
|
||||
decision_table = DecisionTable(decision_table_element.attrib['id'],
|
||||
decision_table_element.attrib.get(
|
||||
'name', ''))
|
||||
decision.decisionTables.append(decision_table)
|
||||
|
||||
# parse inputs
|
||||
self._parse_inputs_outputs(decision_table, decision_table_element)
|
||||
|
||||
def _parse_inputs_outputs(self, decisionTable,
|
||||
decisionTableElement):
|
||||
for element in decisionTableElement:
|
||||
if element.tag.endswith('input'):
|
||||
e_input = self._parse_input(element)
|
||||
decisionTable.inputs.append(e_input)
|
||||
elif element.tag.endswith('output'):
|
||||
output = self._parse_output(element)
|
||||
decisionTable.outputs.append(output)
|
||||
elif element.tag.endswith('rule'):
|
||||
rule = self._parse_rule(decisionTable, element)
|
||||
decisionTable.rules.append(rule)
|
||||
else:
|
||||
raise Exception(
|
||||
'Unknown type in decision table: %r' % element.tag)
|
||||
|
||||
def _parse_input(self, input_element):
|
||||
type_ref = None
|
||||
xpath = xpath_eval(input_element, {'dmn': self.dmn_ns})
|
||||
expression = None
|
||||
for input_expression in xpath('dmn:inputExpression'):
|
||||
type_ref = input_expression.attrib.get('typeRef', '')
|
||||
expression_node = input_expression.find('{' + self.dmn_ns + '}text')
|
||||
if expression_node is not None:
|
||||
expression = expression_node.text
|
||||
|
||||
return Input(input_element.attrib['id'],
|
||||
input_element.attrib.get('label', ''),
|
||||
input_element.attrib.get('name', ''),
|
||||
expression,
|
||||
type_ref)
|
||||
|
||||
def _parse_output(self, outputElement):
|
||||
output = Output(outputElement.attrib['id'],
|
||||
outputElement.attrib.get('label', ''),
|
||||
outputElement.attrib.get('name', ''),
|
||||
outputElement.attrib.get('typeRef', ''))
|
||||
return output
|
||||
|
||||
def _parse_rule(self, decisionTable, ruleElement):
|
||||
rule = Rule(ruleElement.attrib['id'])
|
||||
|
||||
input_idx = 0
|
||||
output_idx = 0
|
||||
for child in ruleElement:
|
||||
# Load description
|
||||
if child.tag.endswith('description'):
|
||||
rule.description = child.text
|
||||
|
||||
# Load input entries
|
||||
elif child.tag.endswith('inputEntry'):
|
||||
input_entry = self._parse_input_output_element(decisionTable,
|
||||
child,
|
||||
InputEntry,
|
||||
input_idx)
|
||||
rule.inputEntries.append(input_entry)
|
||||
input_idx += 1
|
||||
|
||||
# Load output entries
|
||||
elif child.tag.endswith('outputEntry'):
|
||||
output_entry = self._parse_input_output_element(decisionTable,
|
||||
child,
|
||||
OutputEntry,
|
||||
output_idx)
|
||||
rule.outputEntries.append(output_entry)
|
||||
output_idx += 1
|
||||
|
||||
return rule
|
||||
|
||||
def _parse_input_output_element(self, decision_table, element, cls, idx):
|
||||
input_or_output = (
|
||||
decision_table.inputs if cls == InputEntry else decision_table.outputs if cls == OutputEntry else None)[
|
||||
idx]
|
||||
entry = cls(element.attrib['id'], input_or_output)
|
||||
for child in element:
|
||||
if child.tag.endswith('description'):
|
||||
entry.description = child.text
|
||||
elif child.tag.endswith('text'):
|
||||
entry.text = child.text
|
||||
if cls == InputEntry:
|
||||
entry.lhs.append(entry.text)
|
||||
elif cls == OutputEntry:
|
||||
if entry.text and entry.text != '':
|
||||
try:
|
||||
ast.parse(entry.text)
|
||||
except Exception as e:
|
||||
raise Exception(
|
||||
"Malformed Output Expression '%s'. %s " % (entry.text, str(e)))
|
||||
return entry
|
|
@ -0,0 +1 @@
|
|||
from .task_spec_converters import BusinessRuleTaskConverter
|
|
@ -0,0 +1,96 @@
|
|||
from ...bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
|
||||
|
||||
from ..specs.BusinessRuleTask import BusinessRuleTask
|
||||
from ..specs.model import DecisionTable, Rule
|
||||
from ..specs.model import Input, InputEntry, Output, OutputEntry
|
||||
from ..engine.DMNEngine import DMNEngine
|
||||
|
||||
class BusinessRuleTaskConverter(BpmnTaskSpecConverter):
|
||||
|
||||
def __init__(self, data_converter=None, typename=None):
|
||||
super().__init__(BusinessRuleTask, data_converter, typename)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
# We only ever use one decision table
|
||||
dct['decision_table'] = self.decision_table_to_dict(spec.dmnEngine.decision_table)
|
||||
return dct
|
||||
|
||||
def decision_table_to_dict(self, table):
|
||||
return {
|
||||
'id': table.id,
|
||||
'name': table.name,
|
||||
'inputs': [val.__dict__ for val in table.inputs],
|
||||
'outputs': [val.__dict__ for val in table.outputs],
|
||||
'rules': [self.rule_to_dict(rule) for rule in table.rules],
|
||||
}
|
||||
|
||||
def input_entry_to_dict(self, entry):
|
||||
return {
|
||||
'id': entry.id,
|
||||
'input_id': entry.input.id,
|
||||
'description': entry.description,
|
||||
'lhs': entry.lhs,
|
||||
}
|
||||
|
||||
def output_entry_to_dict(self, entry):
|
||||
dct = {
|
||||
'id': entry.id,
|
||||
'output_id': entry.output.id,
|
||||
'description': entry.description,
|
||||
'text': entry.text,
|
||||
}
|
||||
return dct
|
||||
|
||||
def rule_to_dict(self, rule):
|
||||
return {
|
||||
'id': rule.id,
|
||||
'description': rule.description,
|
||||
'input_entries': [self.input_entry_to_dict(entry) for entry in rule.inputEntries],
|
||||
'output_entries': [self.output_entry_to_dict(entry) for entry in rule.outputEntries],
|
||||
}
|
||||
|
||||
def from_dict(self, dct):
|
||||
table = self.decision_table_from_dict(dct.pop('decision_table'))
|
||||
dct['dmnEngine'] = DMNEngine(table)
|
||||
return self.task_spec_from_dict(dct)
|
||||
|
||||
def decision_table_from_dict(self, dct):
|
||||
table = DecisionTable(dct['id'], dct['name'])
|
||||
table.inputs = [ Input(**val) for val in dct['inputs'] ]
|
||||
table.outputs = [ Output(**val) for val in dct['outputs'] ]
|
||||
table.rules = [ self.rule_from_dict(rule, table.inputs, table.outputs)
|
||||
for rule in dct['rules'] ]
|
||||
return table
|
||||
|
||||
def input_entry_from_dict(self, dct, inputs):
|
||||
input_id = dct.pop('input_id')
|
||||
my_input = None
|
||||
for i in inputs:
|
||||
if i.id == input_id:
|
||||
my_input = i
|
||||
entry = InputEntry(dct['id'], my_input)
|
||||
entry.description = dct['description']
|
||||
entry.lhs = dct['lhs']
|
||||
return entry
|
||||
|
||||
def output_entry_from_dict(self, dct, outputs):
|
||||
output_id = dct['output_id']
|
||||
my_output = None
|
||||
for i in outputs:
|
||||
if i.id == output_id:
|
||||
my_output = i
|
||||
entry = OutputEntry(dct['id'], my_output)
|
||||
entry.description = dct['description']
|
||||
entry.text = dct['text']
|
||||
return entry
|
||||
|
||||
def rule_from_dict(self, dct, inputs, outputs):
|
||||
rule = Rule(dct['id'])
|
||||
rule.description = dct['description']
|
||||
rule.inputEntries = [self.input_entry_from_dict(entry, inputs)
|
||||
for entry in dct['input_entries']]
|
||||
rule.outputEntries = [self.output_entry_from_dict(entry, outputs)
|
||||
for entry in dct['output_entries']]
|
||||
return rule
|
|
@ -0,0 +1,45 @@
|
|||
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
|
||||
|
||||
from ...specs import Simple
|
||||
|
||||
from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
|
||||
from ...util.deep_merge import DeepMerge
|
||||
|
||||
|
||||
class BusinessRuleTask(Simple, BpmnSpecMixin):
|
||||
"""
|
||||
Task Spec for a bpmn:businessRuleTask (DMN Decision Reference) node.
|
||||
"""
|
||||
|
||||
def _on_trigger(self, my_task):
|
||||
pass
|
||||
|
||||
def __init__(self, wf_spec, name, dmnEngine, **kwargs):
|
||||
super().__init__(wf_spec, name, **kwargs)
|
||||
|
||||
self.dmnEngine = dmnEngine
|
||||
self.res = None
|
||||
self.resDict = None
|
||||
|
||||
@property
|
||||
def spec_class(self):
|
||||
return 'Business Rule Task'
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
try:
|
||||
self.res = self.dmnEngine.decide(my_task)
|
||||
if self.res is not None: # it is conceivable that no rules fire.
|
||||
self.resDict = self.res.output_as_dict(my_task)
|
||||
my_task.data = DeepMerge.merge(my_task.data,self.resDict)
|
||||
super(BusinessRuleTask, self)._on_complete_hook(my_task)
|
||||
except Exception as e:
|
||||
raise WorkflowTaskExecException(my_task, str(e))
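# Illustrative example (hypothetical rule output): if the matched rule's outputs
# evaluate to {'discount': 0.1}, that dict is deep-merged into my_task.data
# before the task completes; if no rule matches, task data is left untouched.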
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_business_rule_task(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(self, serializer, wf_spec, s_state):
|
||||
return serializer.deserialize_business_rule_task(wf_spec, s_state)
|
||||
|
||||
|
|
@ -0,0 +1,178 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
from ...util.deep_merge import DeepMerge
|
||||
|
||||
|
||||
class Decision:
|
||||
def __init__(self, id, name):
|
||||
self.id = id
|
||||
self.name = name
|
||||
|
||||
self.decisionTables = []
|
||||
|
||||
class DecisionTable:
|
||||
def __init__(self, id, name):
|
||||
self.id = id
|
||||
self.name = name
|
||||
|
||||
self.inputs = []
|
||||
self.outputs = []
|
||||
self.rules = []
|
||||
|
||||
def serialize(self):
|
||||
out = {}
|
||||
out['id'] = self.id
|
||||
out['name'] = self.name
|
||||
out['inputs'] = [x.serialize() for x in self.inputs]
|
||||
out['outputs'] = [x.serialize() for x in self.outputs]
|
||||
out['rules'] = [x.serialize() for x in self.rules]
|
||||
return out
|
||||
|
||||
def deserialize(self,indict):
|
||||
self.id = indict['id']
|
||||
self.name = indict['name']
|
||||
self.inputs = [Input(**x) for x in indict['inputs']]
|
||||
list(map(lambda x, y: x.deserialize(y), self.inputs, indict['inputs']))
|
||||
self.outputs = [Output(**x) for x in indict['outputs']]
|
||||
self.rules = [Rule(None) for x in indict['rules']]
|
||||
list(map(lambda x, y: x.deserialize(y),self.rules,indict['rules']))
|
||||
|
||||
|
||||
|
||||
|
||||
class Input:
|
||||
def __init__(self, id, label, name, expression, typeRef):
|
||||
self.id = id
|
||||
self.label = label
|
||||
self.name = name
|
||||
self.expression = expression
|
||||
self.typeRef = typeRef
|
||||
|
||||
def serialize(self):
|
||||
out = {}
|
||||
out['id'] = self.id
|
||||
out['label'] = self.label
|
||||
out['name'] = self.name
|
||||
out['expression'] = self.expression
|
||||
out['typeRef'] = self.typeRef
|
||||
return out
|
||||
|
||||
def deserialize(self,indict):
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
class InputEntry:
|
||||
def __init__(self, id, input):
|
||||
self.id = id
|
||||
self.input = input
|
||||
|
||||
self.description = ''
|
||||
self.lhs = []
|
||||
|
||||
def serialize(self):
|
||||
out = {}
|
||||
out['id'] = self.id
|
||||
out['input'] = self.input.serialize()
|
||||
out['description'] = self.description
|
||||
out['lhs'] = self.lhs
|
||||
return out
|
||||
|
||||
def deserialize(self, indict):
|
||||
self.id = indict['id']
|
||||
self.description = indict['description']
|
||||
self.lhs = indict['lhs']
|
||||
self.input = Input(**indict['input'])
|
||||
self.input.deserialize(indict['input'])
|
||||
|
||||
class Output:
|
||||
def __init__(self, id, label, name, typeRef):
|
||||
self.id = id
|
||||
self.label = label
|
||||
self.name = name
|
||||
self.typeRef = typeRef
|
||||
|
||||
def serialize(self):
|
||||
out = {}
|
||||
out['id'] = self.id
|
||||
out['label'] = self.label
|
||||
out['name'] = self.name
|
||||
out['typeRef'] = self.typeRef
|
||||
return out
|
||||
|
||||
|
||||
class OutputEntry:
|
||||
def __init__(self, id, output):
|
||||
self.id = id
|
||||
self.output = output
|
||||
|
||||
self.description = ''
|
||||
self.text = ''
|
||||
|
||||
def serialize(self):
|
||||
out = {}
|
||||
out['id'] = self.id
|
||||
out['output'] = self.output.serialize()
|
||||
out['description'] = self.description
|
||||
out['text'] = self.text
|
||||
return out
|
||||
|
||||
def deserialize(self, indict):
|
||||
self.id = indict['id']
|
||||
self.description = indict['description']
|
||||
self.text = indict['text']
|
||||
self.output = Output(**indict['output'])
|
||||
|
||||
|
||||
|
||||
class Rule:
|
||||
def __init__(self, id):
|
||||
self.id = id
|
||||
|
||||
self.description = ''
|
||||
self.inputEntries = []
|
||||
self.outputEntries = []
|
||||
|
||||
def serialize(self):
|
||||
out = {}
|
||||
out['id'] = self.id
|
||||
out['description'] = self.description
|
||||
out['inputEntries'] = [x.serialize() for x in self.inputEntries]
|
||||
out['outputEntries'] = [x.serialize() for x in self.outputEntries]
|
||||
return out
|
||||
|
||||
def deserialize(self,indict):
|
||||
self.id = indict['id']
|
||||
self.description = indict['description']
|
||||
self.inputEntries = [InputEntry(None,None) for x in indict['inputEntries']]
|
||||
list(map(lambda x,y : x.deserialize(y), self.inputEntries, indict['inputEntries']))
|
||||
self.outputEntries = [OutputEntry(None, None) for x in indict['outputEntries']]
|
||||
list(map(lambda x, y: x.deserialize(y), self.outputEntries, indict['outputEntries']))
|
||||
|
||||
def output_as_dict(self, task):
|
||||
script_engine = task.workflow.script_engine
|
||||
out = OrderedDict()
|
||||
for outputEntry in self.outputEntries:
|
||||
# try to use the name, but fall back to the label if no name is provided.
|
||||
key = outputEntry.output.name or outputEntry.output.label
|
||||
if hasattr(outputEntry, "text") and outputEntry.text:
|
||||
outvalue = script_engine.evaluate(task, outputEntry.text)
|
||||
else:
|
||||
outvalue = ""
|
||||
if '.' in key: # we need to allow for dot notation in the DMN -
|
||||
# I would use box to do this, but they didn't have a feature to build
|
||||
# a dict based on a dot notation without eval
|
||||
# so we build up a dictionary structure based on the key, and let the parent
|
||||
# do a deep merge
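# Illustrative example (hypothetical output name): an output named 'foo.bar.baz'
# whose entry evaluates to 42 is built up as {'bar': {'baz': 42}} and then
# deep-merged into out['foo'] below.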
|
||||
currentout = {}
|
||||
subkeylist = list(reversed(key.split('.')))
|
||||
for subkey in subkeylist[:-1]:
|
||||
currentout[subkey] = outvalue
|
||||
outvalue = currentout
|
||||
currentout = {}
|
||||
basekey = subkeylist[-1]
|
||||
out[basekey] = DeepMerge.merge(out.get(basekey,{}),outvalue)
|
||||
else:
|
||||
out[key] = outvalue
|
||||
return out
|
|
@ -0,0 +1,76 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2007 Samuel Abels
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
|
||||
class WorkflowException(Exception):
|
||||
"""
|
||||
Base class for all SpiffWorkflow-generated exceptions.
|
||||
"""
|
||||
|
||||
def __init__(self, sender, error):
|
||||
"""
|
||||
Standard exception class.
|
||||
|
||||
:param sender: the task spec that threw the exception
|
||||
:type sender: TaskSpec
|
||||
:param error: a human readable error message
|
||||
:type error: string
|
||||
"""
|
||||
Exception.__init__(self, str(error))
|
||||
# Points to the TaskSpec that generated the exception.
|
||||
self.sender = sender
|
||||
|
||||
@staticmethod
|
||||
def get_task_trace(task):
|
||||
task_trace = [f"{task.task_spec.description} ({task.workflow.spec.file})"]
|
||||
workflow = task.workflow
|
||||
while workflow != workflow.outer_workflow:
|
||||
caller = workflow.name
|
||||
workflow = workflow.outer_workflow
|
||||
task_trace.append(f"{workflow.spec.task_specs[caller].description} ({workflow.spec.file})")
|
||||
return task_trace
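# Illustrative result (hypothetical names): ['Approve Order (invoice.bpmn)',
# 'Invoice Subprocess (main.bpmn)'] - the failing task first, followed by each
# calling task up through the outer workflows.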
|
||||
|
||||
class WorkflowTaskException(WorkflowException):
|
||||
"""WorkflowException that provides task_trace information."""
|
||||
|
||||
def __init__(self, task, error_msg, exception=None):
|
||||
"""
|
||||
Exception initialization.
|
||||
|
||||
:param task: the task that threw the exception
|
||||
:type task: Task
|
||||
:param error_msg: a human readable error message
|
||||
:type error_msg: str
|
||||
:param exception: an exception to wrap, if any
|
||||
:type exception: Exception
|
||||
"""
|
||||
|
||||
self.exception = exception
|
||||
self.task = task
|
||||
|
||||
# If encountered in a sub-workflow, this traces back up the stack
|
||||
# so we can tell how we got to this particular task, no matter how
|
||||
# deeply nested in sub-workflows it is. Takes the form of:
|
||||
# task-description (file-name)
|
||||
self.task_trace = self.get_task_trace(task)
|
||||
|
||||
super().__init__(task.task_spec, error_msg)
|
||||
|
||||
|
||||
class StorageException(Exception):
|
||||
pass
|
|
@ -0,0 +1,372 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2007 Samuel Abels
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
import logging
|
||||
import re
|
||||
|
||||
logger = logging.getLogger('spiff')
|
||||
|
||||
|
||||
class Term(object):
|
||||
|
||||
"""
|
||||
Abstract base class for all operators and expressions.
|
||||
"""
|
||||
pass
|
||||
|
||||
class DotDict(dict):
|
||||
"""dot.notation access to dictionary attributes"""
|
||||
def __getattr__(*args):
|
||||
val = dict.get(*args)
|
||||
return DotDict(val) if type(val) is dict else val
|
||||
__setattr__ = dict.__setitem__
|
||||
__delattr__ = dict.__delitem__
|
||||
|
||||
|
||||
|
||||
class Attrib(Term):
|
||||
|
||||
"""
|
||||
Used for marking a value such that it is recognized to be an
|
||||
attribute name by valueof().
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def serialize(self, serializer):
|
||||
"""
|
||||
Serializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.serialize_attrib(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
"""
|
||||
Deserializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.deserialize_attrib(s_state)
|
||||
|
||||
|
||||
class PathAttrib(Term):
|
||||
|
||||
"""
|
||||
Used for marking a value such that it is recognized to be an
|
||||
attribute obtained by evaluating a path by valueof().
|
||||
"""
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self.name = path
|
||||
|
||||
def serialize(self, serializer):
|
||||
"""
|
||||
Serializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.serialize_pathattrib(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
"""
|
||||
Deserializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.deserialize_pathattrib(s_state)
|
||||
|
||||
|
||||
class Assign(Term):
|
||||
|
||||
"""
|
||||
Assigns a new value to an attribute. The source may be either
|
||||
a static value, or another attribute.
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
left_attribute,
|
||||
right_attribute=None,
|
||||
right=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:type left_attribute: str
|
||||
:param left_attribute: The name of the attribute to which the value
|
||||
is assigned.
|
||||
:type right: object
|
||||
:param right: A static value that, when given, is assigned to
|
||||
left_attribute.
|
||||
:type right_attribute: str
|
||||
:param right_attribute: When given, the attribute with the given
|
||||
name is used as the source (instead of the
|
||||
static value).
|
||||
:type kwargs: dict
|
||||
:param kwargs: See :class:`SpiffWorkflow.specs.TaskSpec`.
|
||||
"""
|
||||
if not right_attribute and not right:
|
||||
raise ValueError('require argument: right_attribute or right')
|
||||
assert left_attribute is not None
|
||||
self.left_attribute = left_attribute
|
||||
self.right_attribute = right_attribute
|
||||
self.right = right
|
||||
|
||||
def assign(self, from_obj, to_obj):
|
||||
# Fetch the value of the right expression.
|
||||
if self.right is not None:
|
||||
right = self.right
|
||||
else:
|
||||
right = from_obj.get_data(self.right_attribute)
|
||||
to_obj.set_data(**{str(self.left_attribute): right})
|
||||
|
||||
def serialize(self, serializer):
|
||||
"""
|
||||
Serializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.serialize_assign(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
"""
|
||||
Deserializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.deserialize_assign(s_state)
|
||||
|
||||
|
||||
def valueof(scope, op, default=None):
|
||||
if op is None:
|
||||
return default
|
||||
elif isinstance(op, Attrib):
|
||||
if op.name not in scope.data:
|
||||
logger.debug(f"Attrib('{op.name}') not present in task data", extra=scope.log_info({'data': scope.data}))
|
||||
return scope.get_data(op.name, default)
|
||||
elif isinstance(op, PathAttrib):
|
||||
if not op.path:
|
||||
return default
|
||||
parts = op.path.split('/')
|
||||
data = scope.data
|
||||
for part in parts:
|
||||
if part not in data:
|
||||
logger.debug(f"PathAttrib('{op.name}') not present in task data", extra=scope.log_info({'data': scope.data}))
|
||||
return default
|
||||
data = data[part] # move down the path
|
||||
return data
|
||||
else:
|
||||
return op
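# Illustrative usage (hypothetical data): valueof(task, Attrib('total')) reads
# task.data['total'], while valueof(task, PathAttrib('order/customer/name')) walks
# task.data['order']['customer']['name'], returning the default if any path
# segment is missing.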
|
||||
|
||||
def is_number(text):
|
||||
try:
|
||||
int(text)
|
||||
except (TypeError, ValueError):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class Operator(Term):
|
||||
|
||||
"""
|
||||
Abstract base class for all operators.
|
||||
"""
|
||||
|
||||
def __init__(self, *args):
|
||||
"""
|
||||
Constructor.
|
||||
"""
|
||||
if len(args) == 0:
|
||||
raise TypeError("Too few arguments")
|
||||
self.args = args
|
||||
|
||||
def _get_values(self, task):
|
||||
values = []
|
||||
for arg in self.args:
|
||||
values.append(str(valueof(task, arg)))
|
||||
return values
|
||||
|
||||
def _matches(self, task):
|
||||
raise NotImplementedError("Abstract class, do not call")
|
||||
|
||||
def serialize(self, serializer):
|
||||
"""
|
||||
Serializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.serialize_operator(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
"""
|
||||
Deserializes the instance using the provided serializer.
|
||||
|
||||
:type serializer: :class:`SpiffWorkflow.serializer.base.Serializer`
|
||||
:param serializer: The serializer to use.
|
||||
:rtype: object
|
||||
:returns: The serialized object.
|
||||
"""
|
||||
return serializer.deserialize_operator(s_state)
|
||||
|
||||
|
||||
class Equal(Operator):
|
||||
|
||||
"""
|
||||
This class represents the EQUAL operator.
|
||||
"""
|
||||
|
||||
def _matches(self, task):
|
||||
values = self._get_values(task)
|
||||
last = values[0]
|
||||
for value in values:
|
||||
if value != last:
|
||||
return False
|
||||
last = value
|
||||
return True
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_operator_equal(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
return serializer.deserialize_operator_equal(s_state)
|
||||
|
||||
|
||||
class NotEqual(Operator):
|
||||
|
||||
"""
|
||||
This class represents the NOT EQUAL operator.
|
||||
"""
|
||||
|
||||
def _matches(self, task):
|
||||
values = self._get_values(task)
|
||||
last = values[0]
|
||||
for value in values:
|
||||
if value != last:
|
||||
return True
|
||||
last = value
|
||||
return False
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_operator_not_equal(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
return serializer.deserialize_operator_not_equal(s_state)
|
||||
|
||||
|
||||
class GreaterThan(Operator):
|
||||
|
||||
"""
|
||||
This class represents the GREATER THAN operator.
|
||||
"""
|
||||
|
||||
def __init__(self, left, right):
|
||||
"""
|
||||
Constructor.
|
||||
"""
|
||||
Operator.__init__(self, left, right)
|
||||
|
||||
def _matches(self, task):
|
||||
left, right = self._get_values(task)
|
||||
return int(left) > int(right)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_operator_greater_than(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
return serializer.deserialize_operator_greater_than(s_state)
|
||||
|
||||
|
||||
class LessThan(Operator):
|
||||
|
||||
"""
|
||||
This class represents the LESS THAN operator.
|
||||
"""
|
||||
|
||||
def __init__(self, left, right):
|
||||
"""
|
||||
Constructor.
|
||||
"""
|
||||
Operator.__init__(self, left, right)
|
||||
|
||||
def _matches(self, task):
|
||||
left, right = self._get_values(task)
|
||||
return int(left) < int(right)
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_operator_less_than(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
return serializer.deserialize_operator_less_than(s_state)
|
||||
|
||||
|
||||
class Match(Operator):
|
||||
|
||||
"""
|
||||
This class represents the regular expression match operator.
|
||||
"""
|
||||
|
||||
def __init__(self, regex, *args):
|
||||
"""
|
||||
Constructor.
|
||||
"""
|
||||
Operator.__init__(self, *args)
|
||||
self.regex = re.compile(regex)
|
||||
|
||||
def _matches(self, task):
|
||||
for value in self._get_values(task):
|
||||
if not self.regex.search(value):
|
||||
return False
|
||||
return True
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_operator_match(self)
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, s_state):
|
||||
return serializer.deserialize_operator_match(s_state)
|
|
@ -0,0 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from builtins import object
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
|
||||
class Serializer(object):
|
||||
|
||||
def serialize_workflow_spec(self, wf_spec, **kwargs):
|
||||
raise NotImplementedError(
|
||||
"You must implement the serialize_workflow_spec method.")
|
||||
|
||||
def deserialize_workflow_spec(self, s_state, **kwargs):
|
||||
raise NotImplementedError(
|
||||
"You must implement the deserialize_workflow_spec method.")
|
||||
|
||||
def serialize_workflow(self, workflow, **kwargs):
|
||||
raise NotImplementedError(
|
||||
"You must implement the serialize_workflow method.")
|
||||
|
||||
def deserialize_workflow(self, s_state, **kwargs):
|
||||
raise NotImplementedError(
|
||||
"You must implement the deserialize_workflow method.")
|
|
@ -0,0 +1,727 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
import json
|
||||
from builtins import str
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
import pickle
|
||||
from base64 import b64encode, b64decode
|
||||
from ..workflow import Workflow
|
||||
from ..util.impl import get_class
|
||||
from ..task import Task
|
||||
from ..operators import (Attrib, PathAttrib, Equal, NotEqual,
|
||||
Operator, GreaterThan, LessThan, Match)
|
||||
from ..specs import (Cancel, AcquireMutex, CancelTask, Celery, Choose,
|
||||
ExclusiveChoice, Execute, Gate, Join, MultiChoice,
|
||||
MultiInstance, ReleaseMutex, Simple, WorkflowSpec,
|
||||
TaskSpec, SubWorkflow, StartTask, ThreadMerge,
|
||||
ThreadSplit, ThreadStart, Merge, Trigger, LoopResetTask)
|
||||
from .base import Serializer
|
||||
from .exceptions import TaskNotSupportedError, MissingSpecError
|
||||
import warnings
|
||||
|
||||
class DictionarySerializer(Serializer):
|
||||
|
||||
def __init__(self):
|
||||
# When deserializing, this is a set of specs for sub-workflows.
|
||||
# This prevents us from serializing a copy of the same spec many
|
||||
# times, which can create very large files.
|
||||
self.SPEC_STATES = {}
|
||||
|
||||
def serialize_dict(self, thedict):
|
||||
return dict(
|
||||
(str(k), b64encode(pickle.dumps(v,
|
||||
protocol=pickle.HIGHEST_PROTOCOL)))
|
||||
for k, v in list(thedict.items()))
|
||||
|
||||
def deserialize_dict(self, s_state):
|
||||
return dict((k, pickle.loads(b64decode(v)))
|
||||
for k, v in list(s_state.items()))
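# Note: serialize_dict and deserialize_dict are inverses; values are pickled and
# base64-encoded so arbitrary Python objects survive storage in text formats such
# as JSON, at the cost of the serialized form not being human readable.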
|
||||
|
||||
def serialize_list(self, thelist):
|
||||
return [b64encode(pickle.dumps(v, protocol=pickle.HIGHEST_PROTOCOL))
|
||||
for v in thelist]
|
||||
|
||||
def deserialize_list(self, s_state):
|
||||
return [pickle.loads(b64decode(v)) for v in s_state]
|
||||
|
||||
def serialize_attrib(self, attrib):
|
||||
return attrib.name
|
||||
|
||||
def deserialize_attrib(self, s_state):
|
||||
return Attrib(s_state)
|
||||
|
||||
def serialize_pathattrib(self, pathattrib):
|
||||
return pathattrib.path
|
||||
|
||||
def deserialize_pathattrib(self, s_state):
|
||||
return PathAttrib(s_state)
|
||||
|
||||
def serialize_operator(self, op):
|
||||
return [self.serialize_arg(a) for a in op.args]
|
||||
|
||||
def deserialize_operator(self, s_state):
|
||||
return [self.deserialize_arg(c) for c in s_state]
|
||||
|
||||
def serialize_operator_equal(self, op):
|
||||
return self.serialize_operator(op)
|
||||
|
||||
def deserialize_operator_equal(self, s_state):
|
||||
return Equal(*[self.deserialize_arg(c) for c in s_state])
|
||||
|
||||
def serialize_operator_not_equal(self, op):
|
||||
return self.serialize_operator(op)
|
||||
|
||||
def deserialize_operator_not_equal(self, s_state):
|
||||
return NotEqual(*[self.deserialize_arg(c) for c in s_state])
|
||||
|
||||
def serialize_operator_greater_than(self, op):
|
||||
return self.serialize_operator(op)
|
||||
|
||||
def deserialize_operator_greater_than(self, s_state):
|
||||
return GreaterThan(*[self.deserialize_arg(c) for c in s_state])
|
||||
|
||||
def serialize_operator_less_than(self, op):
|
||||
return self.serialize_operator(op)
|
||||
|
||||
def deserialize_operator_less_than(self, s_state):
|
||||
return LessThan(*[self.deserialize_arg(c) for c in s_state])
|
||||
|
||||
def serialize_operator_match(self, op):
|
||||
return self.serialize_operator(op)
|
||||
|
||||
def deserialize_operator_match(self, s_state):
|
||||
return Match(*[self.deserialize_arg(c) for c in s_state])
|
||||
|
||||
def serialize_arg(self, arg):
|
||||
if isinstance(arg, Attrib):
|
||||
return 'Attrib', self.serialize_attrib(arg)
|
||||
elif isinstance(arg, PathAttrib):
|
||||
return 'PathAttrib', self.serialize_pathattrib(arg)
|
||||
elif isinstance(arg, Operator):
|
||||
module = arg.__class__.__module__
|
||||
arg_type = module + '.' + arg.__class__.__name__
|
||||
return arg_type, arg.serialize(self)
|
||||
return 'value', arg
|
||||
|
||||
def deserialize_arg(self, s_state):
|
||||
arg_type, arg = s_state
|
||||
if arg_type == 'Attrib':
|
||||
return self.deserialize_attrib(arg)
|
||||
elif arg_type == 'PathAttrib':
|
||||
return self.deserialize_pathattrib(arg)
|
||||
elif arg_type == 'value':
|
||||
return arg
|
||||
arg_cls = get_class(arg_type)
|
||||
ret = arg_cls.deserialize(self, arg)
|
||||
if isinstance(ret,list):
|
||||
return arg_cls(*ret)
|
||||
else:
|
||||
return ret
|
||||
|
||||
def serialize_task_spec(self, spec):
|
||||
s_state = dict(id=spec.id,
|
||||
name=spec.name,
|
||||
description=spec.description,
|
||||
manual=spec.manual,
|
||||
internal=spec.internal,
|
||||
lookahead=spec.lookahead)
|
||||
module_name = spec.__class__.__module__
|
||||
s_state['class'] = module_name + '.' + spec.__class__.__name__
|
||||
s_state['inputs'] = [t.id for t in spec.inputs]
|
||||
s_state['outputs'] = [t.id for t in spec.outputs]
|
||||
s_state['data'] = self.serialize_dict(spec.data)
|
||||
if hasattr(spec, 'position'):
|
||||
s_state['position'] = self.serialize_dict(spec.position)
|
||||
|
||||
s_state['defines'] = self.serialize_dict(spec.defines)
|
||||
s_state['pre_assign'] = self.serialize_list(spec.pre_assign)
|
||||
s_state['post_assign'] = self.serialize_list(spec.post_assign)
|
||||
s_state['locks'] = spec.locks[:]
|
||||
|
||||
# Note: Events are not serialized; this is documented in
|
||||
# the TaskSpec API docs.
|
||||
|
||||
return s_state
|
||||
|
||||
def deserialize_task_spec(self, wf_spec, s_state, spec):
|
||||
spec.id = s_state.get('id', None)
|
||||
spec.description = s_state.get('description', '')
|
||||
spec.manual = s_state.get('manual', False)
|
||||
spec.internal = s_state.get('internal', False)
|
||||
spec.lookahead = s_state.get('lookahead', 2)
|
||||
|
||||
spec.data = self.deserialize_dict(s_state.get('data', {}))
|
||||
if 'position' in s_state.keys():
|
||||
spec.position = self.deserialize_dict(s_state.get('position', {}))
|
||||
spec.defines = self.deserialize_dict(s_state.get('defines', {}))
|
||||
spec.pre_assign = self.deserialize_list(s_state.get('pre_assign', []))
|
||||
spec.post_assign = self.deserialize_list(
|
||||
s_state.get('post_assign', []))
|
||||
spec.locks = s_state.get('locks', [])[:]
|
||||
# We can't restore inputs and outputs yet because they may not be
|
||||
# deserialized yet. So keep the names, and resolve them in the end.
|
||||
spec.inputs = s_state.get('inputs', [])[:]
|
||||
spec.outputs = s_state.get('outputs', [])[:]
|
||||
|
||||
return spec
|
||||
|
||||
def serialize_acquire_mutex(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['mutex'] = spec.mutex
|
||||
return s_state
|
||||
|
||||
def deserialize_acquire_mutex(self, wf_spec, s_state):
|
||||
spec = AcquireMutex(wf_spec, s_state['name'], s_state['mutex'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
spec.mutex = s_state['mutex']
|
||||
return spec
|
||||
|
||||
def serialize_cancel(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['cancel_successfully'] = spec.cancel_successfully
|
||||
return s_state
|
||||
|
||||
def deserialize_cancel(self, wf_spec, s_state):
|
||||
spec = Cancel(wf_spec, s_state['name'],
|
||||
success=s_state.get('cancel_successfully', False))
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_cancel_task(self, spec):
|
||||
return self.serialize_trigger(spec)
|
||||
|
||||
def deserialize_cancel_task(self, wf_spec, s_state):
|
||||
spec = CancelTask(wf_spec,
|
||||
s_state['name'],
|
||||
s_state['context'],
|
||||
times=self.deserialize_arg(s_state['times']))
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_celery(self, spec):
|
||||
args = self.serialize_list(spec.args)
|
||||
kwargs = self.serialize_dict(spec.kwargs)
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['call'] = spec.call
|
||||
s_state['args'] = args
|
||||
s_state['kwargs'] = kwargs
|
||||
s_state['result_key'] = spec.result_key
|
||||
return s_state
|
||||
|
||||
def deserialize_celery(self, wf_spec, s_state):
|
||||
args = self.deserialize_list(s_state['args'])
|
||||
kwargs = self.deserialize_dict(s_state.get('kwargs', {}))
|
||||
spec = Celery(wf_spec, s_state['name'], s_state['call'],
|
||||
call_args=args,
|
||||
result_key=s_state['result_key'],
|
||||
**kwargs)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec)
|
||||
return spec
|
||||
|
||||
def serialize_choose(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['context'] = spec.context
|
||||
# despite the various documentation suggesting that choice ought to be
|
||||
# a collection of objects, here it is a collection of strings. The
|
||||
# handler in MultiChoice.py converts it to TaskSpecs. So instead of:
|
||||
# s_state['choice'] = [c.name for c in spec.choice]
|
||||
# we have:
|
||||
s_state['choice'] = spec.choice
|
||||
return s_state
|
||||
|
||||
def deserialize_choose(self, wf_spec, s_state):
|
||||
spec = Choose(wf_spec,
|
||||
s_state['name'],
|
||||
s_state['context'],
|
||||
s_state['choice'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
|
||||
def serialize_exclusive_choice(self, spec):
|
||||
s_state = self.serialize_multi_choice(spec)
|
||||
s_state['default_task_spec'] = spec.default_task_spec
|
||||
return s_state
|
||||
|
||||
def deserialize_exclusive_choice(self, wf_spec, s_state):
|
||||
spec = ExclusiveChoice(wf_spec, s_state['name'])
|
||||
self.deserialize_multi_choice(wf_spec, s_state, spec=spec)
|
||||
spec.default_task_spec = s_state['default_task_spec']
|
||||
return spec
|
||||
|
||||
def serialize_execute(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['args'] = spec.args
|
||||
return s_state
|
||||
|
||||
def deserialize_execute(self, wf_spec, s_state):
|
||||
spec = Execute(wf_spec, s_state['name'], s_state['args'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_gate(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['context'] = spec.context
|
||||
return s_state
|
||||
|
||||
def deserialize_gate(self, wf_spec, s_state):
|
||||
spec = Gate(wf_spec, s_state['name'], s_state['context'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_loop_reset_task(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['destination_id'] = spec.destination_id
|
||||
s_state['destination_spec_name'] = spec.destination_spec_name
|
||||
return s_state
|
||||
|
||||
def deserialize_loop_reset_task(self, wf_spec, s_state):
|
||||
spec = LoopResetTask(wf_spec, s_state['name'], s_state['destination_id'],
|
||||
s_state['destination_spec_name'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_join(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['split_task'] = spec.split_task
|
||||
s_state['threshold'] = b64encode(
|
||||
pickle.dumps(spec.threshold, protocol=pickle.HIGHEST_PROTOCOL))
|
||||
s_state['cancel_remaining'] = spec.cancel_remaining
|
||||
return s_state
|
||||
|
||||
def deserialize_join(self, wf_spec, s_state, cls=Join):
|
||||
        if isinstance(s_state['threshold'], dict):
|
||||
byte_payload = s_state['threshold']['__bytes__']
|
||||
else:
|
||||
byte_payload = s_state['threshold']
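        # serialize_join stores the threshold as a base64-encoded pickle; the
        # stored value may arrive either as the raw payload or wrapped in a
        # {'__bytes__': ...} dict, so both forms are accepted here.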
|
||||
spec = cls(wf_spec,
|
||||
s_state['name'],
|
||||
split_task=s_state['split_task'],
|
||||
threshold=pickle.loads(b64decode(byte_payload)),
|
||||
cancel=s_state['cancel_remaining'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_multi_choice(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['cond_task_specs'] = thestate = []
|
||||
for condition, spec_name in spec.cond_task_specs:
|
||||
cond = self.serialize_arg(condition)
|
||||
thestate.append((cond, spec_name))
|
||||
# spec.choice is actually a list of strings in MultiChoice: see
|
||||
# _predict_hook. So, instead of
|
||||
# s_state['choice'] = spec.choice and spec.choice.name or None
|
||||
s_state['choice'] = spec.choice or None
|
||||
return s_state
|
||||
|
||||
def deserialize_multi_choice(self, wf_spec, s_state, spec=None):
|
||||
if spec is None:
|
||||
spec = MultiChoice(wf_spec, s_state['name'])
|
||||
if s_state.get('choice') is not None:
|
||||
# this is done in _predict_hook: it's kept as a string for now.
|
||||
# spec.choice = wf_spec.get_task_spec_from_name(s_state['choice'])
|
||||
spec.choice = s_state['choice']
|
||||
for cond, spec_name in s_state['cond_task_specs']:
|
||||
condition = self.deserialize_arg(cond)
|
||||
spec.cond_task_specs.append((condition, spec_name))
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_multi_instance(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
# here we need to add in all of the things that would get serialized
|
||||
        # for other classes that the MultiInstance could be.
|
||||
#
|
||||
|
||||
if isinstance(spec, SubWorkflow):
|
||||
br_state = self.serialize_sub_workflow(spec)
|
||||
s_state['file'] = br_state['file']
|
||||
s_state['in_assign'] = br_state['in_assign']
|
||||
s_state['out_assign'] = br_state['out_assign']
|
||||
|
||||
s_state['times'] = self.serialize_arg(spec.times)
|
||||
s_state['prevtaskclass'] = spec.prevtaskclass
|
||||
return s_state
|
||||
|
||||
def deserialize_multi_instance(self, wf_spec, s_state, cls=None):
|
||||
        if cls is None:
|
||||
cls = MultiInstance(wf_spec,
|
||||
s_state['name'],
|
||||
times=self.deserialize_arg(s_state['times']))
|
||||
        if isinstance(s_state['times'], list):
|
||||
s_state['times'] = self.deserialize_arg(s_state['times'])
|
||||
cls.times = s_state['times']
|
||||
if isinstance(cls, SubWorkflow):
|
||||
if s_state.get('file'):
|
||||
cls.file = self.deserialize_arg(s_state['file'])
|
||||
else:
|
||||
cls.file = None
|
||||
cls.in_assign = self.deserialize_list(s_state['in_assign'])
|
||||
cls.out_assign = self.deserialize_list(s_state['out_assign'])
|
||||
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=cls)
|
||||
return cls
|
||||
|
||||
def serialize_release_mutex(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['mutex'] = spec.mutex
|
||||
return s_state
|
||||
|
||||
def deserialize_release_mutex(self, wf_spec, s_state):
|
||||
spec = ReleaseMutex(wf_spec, s_state['name'], s_state['mutex'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_simple(self, spec):
|
||||
assert isinstance(spec, TaskSpec)
|
||||
return self.serialize_task_spec(spec)
|
||||
|
||||
def deserialize_simple(self, wf_spec, s_state):
|
||||
assert isinstance(wf_spec, WorkflowSpec)
|
||||
spec = Simple(wf_spec, s_state['name'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
|
||||
    def deserialize_generic(self, wf_spec, s_state, newclass):
|
||||
assert isinstance(wf_spec, WorkflowSpec)
|
||||
spec = newclass(wf_spec, s_state['name'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_start_task(self, spec):
|
||||
return self.serialize_task_spec(spec)
|
||||
|
||||
def deserialize_start_task(self, wf_spec, s_state):
|
||||
spec = StartTask(wf_spec)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_sub_workflow(self, spec):
|
||||
warnings.warn("SubWorkflows cannot be safely serialized as they only" +
|
||||
" store a reference to the subworkflow specification " +
|
||||
" as a path to an external XML file.")
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['file'] = spec.file
|
||||
s_state['in_assign'] = self.serialize_list(spec.in_assign)
|
||||
s_state['out_assign'] = self.serialize_list(spec.out_assign)
|
||||
return s_state
|
||||
|
||||
def deserialize_sub_workflow(self, wf_spec, s_state):
|
||||
warnings.warn("SubWorkflows cannot be safely deserialized as they " +
|
||||
"only store a reference to the subworkflow " +
|
||||
"specification as a path to an external XML file.")
|
||||
spec = SubWorkflow(wf_spec, s_state['name'], s_state['file'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
spec.in_assign = self.deserialize_list(s_state['in_assign'])
|
||||
spec.out_assign = self.deserialize_list(s_state['out_assign'])
|
||||
return spec
|
||||
|
||||
def serialize_thread_merge(self, spec):
|
||||
return self.serialize_join(spec)
|
||||
|
||||
def deserialize_thread_merge(self, wf_spec, s_state):
|
||||
spec = ThreadMerge(wf_spec, s_state['name'], s_state['split_task'])
|
||||
        # while ThreadMerge is a Join, deserialize_join isn't what we want
|
||||
# here: it makes a join from scratch which we don't need (the
|
||||
# ThreadMerge constructor does it all). Just task_spec it.
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_thread_split(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['times'] = self.serialize_arg(spec.times)
|
||||
return s_state
|
||||
|
||||
def deserialize_thread_split(self, wf_spec, s_state):
|
||||
spec = ThreadSplit(wf_spec,
|
||||
s_state['name'],
|
||||
times=self.deserialize_arg(s_state['times']),
|
||||
suppress_threadstart_creation=True)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_thread_start(self, spec):
|
||||
return self.serialize_task_spec(spec)
|
||||
|
||||
def deserialize_thread_start(self, wf_spec, s_state):
|
||||
spec = ThreadStart(wf_spec)
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def deserialize_merge(self, wf_spec, s_state):
|
||||
spec = Merge(wf_spec, s_state['name'], s_state['split_task'])
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_trigger(self, spec):
|
||||
s_state = self.serialize_task_spec(spec)
|
||||
s_state['context'] = spec.context
|
||||
s_state['times'] = self.serialize_arg(spec.times)
|
||||
s_state['queued'] = spec.queued
|
||||
return s_state
|
||||
|
||||
def deserialize_trigger(self, wf_spec, s_state):
|
||||
spec = Trigger(wf_spec,
|
||||
s_state['name'],
|
||||
s_state['context'],
|
||||
self.deserialize_arg(s_state['times']))
|
||||
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
|
||||
return spec
|
||||
|
||||
def serialize_workflow_spec(self, spec, **kwargs):
|
||||
s_state = dict(name=spec.name,
|
||||
description=spec.description,
|
||||
file=spec.file)
|
||||
|
||||
if 'Root' not in spec.task_specs:
|
||||
# This is to fix up the case when we
|
||||
# load in a task spec and there is no root object.
|
||||
# it causes problems when we deserialize and then re-serialize
|
||||
# because the deserialize process adds a root.
|
||||
root = Simple(spec, 'Root')
|
||||
spec.task_specs['Root'] = root
|
||||
|
||||
mylist = [(k, v.serialize(self)) for k, v in list(spec.task_specs.items())]
|
||||
|
||||
# As we serialize back up, keep only one copy of any sub_workflow
|
||||
s_state['sub_workflows'] = {}
|
||||
for name, task in mylist:
|
||||
if 'spec' in task:
|
||||
                sub_spec = json.loads(task['spec'])
|
||||
                if 'sub_workflows' in sub_spec:
|
||||
                    s_state['sub_workflows'].update(sub_spec['sub_workflows'])
|
||||
                    del sub_spec['sub_workflows']
|
||||
                if sub_spec['name'] not in s_state['sub_workflows']:
|
||||
                    s_state['sub_workflows'][sub_spec['name']] = json.dumps(sub_spec)
|
||||
                task['spec_name'] = sub_spec['name']
|
||||
del task['spec']
|
||||
|
||||
        if hasattr(spec, 'end'):
|
||||
            s_state['end'] = spec.end.id
|
||||
s_state['task_specs'] = dict(mylist)
|
||||
return s_state
|
||||
|
||||
def _deserialize_workflow_spec_task_spec(self, spec, task_spec, name):
|
||||
task_spec.inputs = [spec.get_task_spec_from_id(t) for t in task_spec.inputs]
|
||||
task_spec.outputs = [spec.get_task_spec_from_id(t) for t in task_spec.outputs]
|
||||
|
||||
def _prevtaskclass_bases(self, oldtask):
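        # Must return a tuple of base classes; deserialize_workflow_spec feeds
        # the result straight into type() when rebuilding dynamic task specs.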
|
||||
        return (oldtask,)
|
||||
|
||||
def deserialize_workflow_spec(self, s_state, **kwargs):
|
||||
spec = WorkflowSpec(s_state['name'], filename=s_state['file'])
|
||||
spec.description = s_state['description']
|
||||
# Handle Start Task
|
||||
spec.start = None
|
||||
|
||||
# Store all sub-workflows so they can be referenced.
|
||||
if 'sub_workflows' in s_state:
|
||||
# Hate the whole json dumps thing, why do we do this?
|
||||
self.SPEC_STATES.update(s_state['sub_workflows'])
|
||||
|
||||
del spec.task_specs['Start']
|
||||
start_task_spec_state = s_state['task_specs']['Start']
|
||||
start_task_spec = StartTask.deserialize(
|
||||
self, spec, start_task_spec_state)
|
||||
spec.start = start_task_spec
|
||||
spec.task_specs['Start'] = start_task_spec
|
||||
for name, task_spec_state in list(s_state['task_specs'].items()):
|
||||
if name == 'Start':
|
||||
continue
|
||||
prevtask = task_spec_state.get('prevtaskclass', None)
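            # Specs that were dynamically retyped record their original class
            # in 'prevtaskclass'; rebuild an equivalent class with type(),
            # otherwise import the recorded class directly.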
|
||||
if prevtask:
|
||||
oldtask = get_class(prevtask)
|
||||
task_spec_cls = type(task_spec_state['class'],
|
||||
self._prevtaskclass_bases(oldtask), {})
|
||||
else:
|
||||
task_spec_cls = get_class(task_spec_state['class'])
|
||||
task_spec = task_spec_cls.deserialize(self, spec, task_spec_state)
|
||||
spec.task_specs[name] = task_spec
|
||||
|
||||
for name, task_spec in list(spec.task_specs.items()):
|
||||
self._deserialize_workflow_spec_task_spec(spec, task_spec, name)
|
||||
|
||||
if s_state.get('end', None):
|
||||
spec.end = spec.get_task_spec_from_id(s_state['end'])
|
||||
|
||||
assert spec.start is spec.get_task_spec_from_name('Start')
|
||||
return spec
|
||||
|
||||
def serialize_workflow(self, workflow, include_spec=True, **kwargs):
|
||||
|
||||
assert isinstance(workflow, Workflow)
|
||||
s_state = dict()
|
||||
if include_spec:
|
||||
s_state['wf_spec'] = self.serialize_workflow_spec(workflow.spec,
|
||||
**kwargs)
|
||||
|
||||
# data
|
||||
s_state['data'] = self.serialize_dict(workflow.data)
|
||||
|
||||
# last_node
|
||||
value = workflow.last_task
|
||||
s_state['last_task'] = value.id if value is not None else None
|
||||
|
||||
# outer_workflow
|
||||
# s_state['outer_workflow'] = workflow.outer_workflow.id
|
||||
|
||||
# success
|
||||
s_state['success'] = workflow.success
|
||||
|
||||
# task_tree
|
||||
s_state['task_tree'] = self.serialize_task(workflow.task_tree)
|
||||
|
||||
return s_state
|
||||
|
||||
def deserialize_workflow(self, s_state, wf_class=Workflow,
|
||||
read_only=False, wf_spec=None, **kwargs):
|
||||
"""It is possible to override the workflow class, and specify a
|
||||
workflow_spec, otherwise the spec is assumed to be serialized in the
|
||||
s_state['wf_spec']"""
|
||||
|
||||
if wf_spec is None:
|
||||
wf_spec = self.deserialize_workflow_spec(s_state['wf_spec'], **kwargs)
|
||||
workflow = wf_class(wf_spec)
|
||||
|
||||
workflow.read_only = read_only
|
||||
|
||||
# data
|
||||
workflow.data = self.deserialize_dict(s_state['data'])
|
||||
|
||||
# outer_workflow
|
||||
# workflow.outer_workflow =
|
||||
# find_workflow_by_id(remap_workflow_id(s_state['outer_workflow']))
|
||||
|
||||
# success
|
||||
workflow.success = s_state['success']
|
||||
|
||||
# workflow
|
||||
workflow.spec = wf_spec
|
||||
|
||||
# task_tree
|
||||
workflow.task_tree = self.deserialize_task(
|
||||
workflow, s_state['task_tree'])
|
||||
|
||||
# Re-connect parents
|
||||
tasklist = list(workflow.get_tasks())
|
||||
for task in tasklist:
|
||||
            task.parent = workflow.get_task(task.parent, tasklist)
|
||||
|
||||
# last_task
|
||||
        workflow.last_task = workflow.get_task(s_state['last_task'], tasklist)
|
||||
|
||||
# task_mapping
|
||||
workflow.update_task_mapping()
|
||||
|
||||
return workflow
|
||||
|
||||
|
||||
def serialize_task(self, task, skip_children=False, allow_subs=False):
|
||||
"""
|
||||
:param allow_subs: Allows sub-serialization to take place, otherwise
|
||||
assumes that the subworkflow is stored in internal data and raises an error.
|
||||
"""
|
||||
|
||||
assert isinstance(task, Task)
|
||||
|
||||
# Please note, the BPMN Serializer DOES allow sub-workflows. This is
|
||||
# for backwards compatibility and support of the original parsers.
|
||||
if not allow_subs and isinstance(task.task_spec, SubWorkflow):
|
||||
raise TaskNotSupportedError(
|
||||
"Subworkflow tasks cannot be serialized (due to their use of" +
|
||||
" internal_data to store the subworkflow).")
|
||||
|
||||
s_state = dict()
|
||||
|
||||
# id
|
||||
s_state['id'] = task.id
|
||||
|
||||
# workflow
|
||||
s_state['workflow_name'] = task.workflow.name
|
||||
|
||||
# parent
|
||||
s_state['parent'] = task.parent.id if task.parent is not None else None
|
||||
|
||||
# children
|
||||
if not skip_children:
|
||||
s_state['children'] = [
|
||||
self.serialize_task(child) for child in task.children]
|
||||
|
||||
# state
|
||||
s_state['state'] = task.state
|
||||
s_state['triggered'] = task.triggered
|
||||
|
||||
# task_spec
|
||||
s_state['task_spec'] = task.task_spec.name
|
||||
|
||||
# last_state_change
|
||||
s_state['last_state_change'] = task.last_state_change
|
||||
|
||||
# data
|
||||
s_state['data'] = self.serialize_dict(task.data)
|
||||
|
||||
# internal_data
|
||||
s_state['internal_data'] = task.internal_data
|
||||
|
||||
return s_state
|
||||
|
||||
|
||||
def deserialize_task(self, workflow, s_state):
|
||||
assert isinstance(workflow, Workflow)
|
||||
splits = s_state['task_spec'].split('_')
|
||||
oldtaskname = s_state['task_spec']
|
||||
task_spec = workflow.get_task_spec_from_name(oldtaskname)
|
||||
if task_spec is None:
|
||||
raise MissingSpecError("Unknown task spec: " + oldtaskname)
|
||||
task = Task(workflow, task_spec)
|
||||
|
||||
        if getattr(task_spec, 'isSequential', False) and \
|
||||
s_state['internal_data'].get('splits') is not None:
|
||||
task.task_spec.expanded = s_state['internal_data']['splits']
|
||||
|
||||
|
||||
# id
|
||||
task.id = s_state['id']
|
||||
|
||||
# parent
|
||||
# as the task_tree might not be complete yet
|
||||
# keep the ids so they can be processed at the end
|
||||
task.parent = s_state['parent']
|
||||
|
||||
# children
|
||||
task.children = self._deserialize_task_children(task, s_state)
|
||||
|
||||
# state
|
||||
task._state = s_state['state']
|
||||
task.triggered = s_state['triggered']
|
||||
|
||||
# last_state_change
|
||||
task.last_state_change = s_state['last_state_change']
|
||||
|
||||
# data
|
||||
task.data = self.deserialize_dict(s_state['data'])
|
||||
|
||||
# internal_data
|
||||
task.internal_data = s_state['internal_data']
|
||||
return task
|
||||
|
||||
def _deserialize_task_children(self, task, s_state):
|
||||
"""This may need to be overridden if you need to support
|
||||
deserialization of sub-workflows"""
|
||||
return [self.deserialize_task(task.workflow, c)
|
||||
for c in s_state['children']]
|
|
@ -0,0 +1,81 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
# requires: https://github.com/stricaud/gvgen
|
||||
import gvgen
|
||||
from .base import Serializer
|
||||
|
||||
|
||||
class dotVisualizer(Serializer):
|
||||
|
||||
def serialize_workflow_spec(self, wf_spec):
|
||||
nodes = set()
|
||||
linked = set()
|
||||
graph = gvgen.GvGen()
|
||||
parent = graph.newItem("Workflow")
|
||||
|
||||
# these built in shapes are available:
|
||||
# http://www.graphviz.org/doc/info/shapes.html
|
||||
graph.styleAppend("Cancel", "shape", "oval")
|
||||
graph.styleAppend("CancelTask", "shape", "oval")
|
||||
graph.styleAppend("Choose", "shape", "diamond")
|
||||
graph.styleAppend("ExclusiveChoice", "shape", "diamond")
|
||||
graph.styleAppend("Execute", "shape", "rect")
|
||||
graph.styleAppend("Gate", "shape", "trapezium")
|
||||
graph.styleAppend("Join", "shape", "invtriangle")
|
||||
graph.styleAppend("Merge", "shape", "invtriangle")
|
||||
graph.styleAppend("MultiChoice", "shape", "diamond")
|
||||
graph.styleAppend("MultiInstance", "shape", "box")
|
||||
graph.styleAppend("ReleaseMutex", "shape", "diamond")
|
||||
graph.styleAppend("Simple", "shape", "rect")
|
||||
graph.styleAppend("StartTask", "shape", "oval")
|
||||
graph.styleAppend("SubWorkflow", "shape", "invhouse")
|
||||
graph.styleAppend("ThreadMerge", "shape", "invtriangle")
|
||||
graph.styleAppend("ThreadSplit", "shape", "triangle")
|
||||
graph.styleAppend("ThreadStart", "shape", "oval")
|
||||
graph.styleAppend("Transform", "shape", "rect")
|
||||
graph.styleAppend("Trigger", "shape", "oval")
|
||||
|
||||
# build graph with all the nodes first
|
||||
        def recursivelyAddNodes(task_spec):
|
||||
if task_spec in nodes:
|
||||
return
|
||||
task_spec.gv = graph.newItem(task_spec.name, parent)
|
||||
            # add a default style for this class so that, if one was never
|
||||
            # defined, applying it below doesn't break the GvGen library
|
||||
graph.styleAppend(task_spec.__class__.__name__, "ignore", "this")
|
||||
graph.styleApply(task_spec.__class__.__name__, task_spec.gv)
|
||||
nodes.add(task_spec)
|
||||
sub_specs = ([task_spec.spec.start] if hasattr(
|
||||
task_spec, 'spec') else []) + task_spec.outputs
|
||||
for t in sub_specs:
|
||||
                recursivelyAddNodes(t)
|
||||
|
||||
# then link all the nodes together
|
||||
def recursive_linking(task_spec):
|
||||
if task_spec in linked:
|
||||
return
|
||||
linked.add(task_spec)
|
||||
sub_specs = ([task_spec.spec.start] if hasattr(
|
||||
task_spec, 'spec') else []) + task_spec.outputs
|
||||
            for t in sub_specs:
|
||||
graph.newLink(task_spec.gv, t.gv)
|
||||
recursive_linking(t)
|
||||
|
||||
        recursivelyAddNodes(wf_spec.start)
|
||||
recursive_linking(wf_spec.start)
|
||||
return (graph.dot() if graph.dot() else '')
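# A minimal usage sketch (assumes gvgen is installed and wf_spec is a
# WorkflowSpec):
#
#   dot_source = dotVisualizer().serialize_workflow_spec(wf_spec)
#   with open('workflow.dot', 'w') as f:
#       f.write(dot_source)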
|
|
@ -0,0 +1,10 @@
|
|||
class TaskSpecNotSupportedError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
class TaskNotSupportedError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
class MissingSpecError(ValueError):
|
||||
pass
|
|
@ -0,0 +1,81 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
import json
|
||||
import uuid
|
||||
from ..operators import Attrib
|
||||
from .dict import DictionarySerializer
|
||||
|
||||
class JSONSerializer(DictionarySerializer):
|
||||
|
||||
def serialize_workflow_spec(self, wf_spec, **kwargs):
|
||||
thedict = super(JSONSerializer, self).serialize_workflow_spec(
|
||||
wf_spec, **kwargs)
|
||||
return self._dumps(thedict)
|
||||
|
||||
def deserialize_workflow_spec(self, s_state, **kwargs):
|
||||
thedict = self._loads(s_state)
|
||||
return super(JSONSerializer, self).deserialize_workflow_spec(
|
||||
thedict, **kwargs)
|
||||
|
||||
def serialize_workflow(self, workflow, **kwargs):
|
||||
thedict = super(JSONSerializer, self).serialize_workflow(
|
||||
workflow, **kwargs)
|
||||
return self._dumps(thedict)
|
||||
|
||||
def deserialize_workflow(self, s_state, **kwargs):
|
||||
thedict = self._loads(s_state)
|
||||
return super(JSONSerializer, self).deserialize_workflow(
|
||||
thedict, **kwargs)
|
||||
|
||||
def _object_hook(self, dct):
|
||||
if '__uuid__' in dct:
|
||||
return uuid.UUID(dct['__uuid__'])
|
||||
|
||||
if '__bytes__' in dct:
|
||||
return dct['__bytes__'].encode('ascii')
|
||||
|
||||
if '__attrib__' in dct:
|
||||
return Attrib(dct['__attrib__'])
|
||||
|
||||
return dct
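    # _object_hook and _default below are symmetric: _default writes UUID,
    # bytes and Attrib values as tagged one-key dicts, and _object_hook turns
    # them back into the original objects when loading.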
|
||||
|
||||
def _jsonableHandler(self, obj):
|
||||
if hasattr(obj, 'jsonable'):
|
||||
return obj.jsonable()
|
||||
|
||||
        raise TypeError(
            'Object of type %s with value of %s is not JSON serializable' % (
|
||||
                type(obj), repr(obj)))
|
||||
|
||||
|
||||
def _default(self, obj):
|
||||
if isinstance(obj, uuid.UUID):
|
||||
return {'__uuid__': obj.hex}
|
||||
|
||||
if isinstance(obj, bytes):
|
||||
return {'__bytes__': obj.decode('ascii')}
|
||||
|
||||
if isinstance(obj, Attrib):
|
||||
return {'__attrib__': obj.name}
|
||||
|
||||
raise TypeError('%r is not JSON serializable' % obj)
|
||||
|
||||
def _loads(self, text):
|
||||
return json.loads(text, object_hook=lambda o: self._object_hook(o))
|
||||
|
||||
def _dumps(self, dct):
|
||||
return json.dumps(dct, sort_keys=True, default=lambda o:
|
||||
self._default(o))
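# A minimal round-trip sketch (assumes workflow is a SpiffWorkflow Workflow
# instance):
#
#   serializer = JSONSerializer()
#   state = serializer.serialize_workflow(workflow)
#   restored = serializer.deserialize_workflow(state)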
|
|
@ -0,0 +1,333 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2007-2012 Samuel Abels
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
import re
|
||||
import xml.dom.minidom as minidom
|
||||
from .. import operators, specs
|
||||
from ..exceptions import StorageException
|
||||
from .base import Serializer
|
||||
|
||||
# Create a list of tag names out of the spec names.
|
||||
_spec_map = dict()
|
||||
for name in dir(specs):
|
||||
if name.startswith('_'):
|
||||
continue
|
||||
module = specs.__dict__[name]
|
||||
name = re.sub(r'(.)([A-Z])', r'\1-\2', name).lower()
|
||||
_spec_map[name] = module
|
||||
_spec_map['task'] = specs.Simple
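# The regex above turns CamelCase spec names into hyphenated tag names,
# e.g. 'ExclusiveChoice' -> 'exclusive-choice', 'ThreadSplit' -> 'thread-split';
# a plain <task> tag maps to specs.Simple.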
|
||||
|
||||
_op_map = {'equals': operators.Equal,
|
||||
'not-equals': operators.NotEqual,
|
||||
'less-than': operators.LessThan,
|
||||
'greater-than': operators.GreaterThan,
|
||||
'matches': operators.Match}
|
||||
|
||||
_exc = StorageException
|
||||
|
||||
|
||||
class XmlSerializer(Serializer):
|
||||
|
||||
"""
|
||||
Parses XML into a WorkflowSpec object.
|
||||
"""
|
||||
|
||||
def deserialize_assign(self, workflow, start_node):
|
||||
"""
|
||||
Reads the "pre-assign" or "post-assign" tag from the given node.
|
||||
|
||||
start_node -- the xml node (xml.dom.minidom.Node)
|
||||
"""
|
||||
name = start_node.getAttribute('name')
|
||||
attrib = start_node.getAttribute('field')
|
||||
value = start_node.getAttribute('value')
|
||||
kwargs = {}
|
||||
if name == '':
|
||||
_exc('name attribute required')
|
||||
if attrib != '' and value != '':
|
||||
_exc('Both, field and right-value attributes found')
|
||||
elif attrib == '' and value == '':
|
||||
_exc('field or value attribute required')
|
||||
elif value != '':
|
||||
kwargs['right'] = value
|
||||
else:
|
||||
kwargs['right_attribute'] = attrib
|
||||
return operators.Assign(name, **kwargs)
|
||||
|
||||
def deserialize_data(self, workflow, start_node):
|
||||
"""
|
||||
Reads a "data" or "define" tag from the given node.
|
||||
|
||||
start_node -- the xml node (xml.dom.minidom.Node)
|
||||
"""
|
||||
name = start_node.getAttribute('name')
|
||||
value = start_node.getAttribute('value')
|
||||
return name, value
|
||||
|
||||
def deserialize_assign_list(self, workflow, start_node):
|
||||
"""
|
||||
Reads a list of assignments from the given node.
|
||||
|
||||
workflow -- the workflow
|
||||
start_node -- the xml structure (xml.dom.minidom.Node)
|
||||
"""
|
||||
# Collect all information.
|
||||
assignments = []
|
||||
for node in start_node.childNodes:
|
||||
if node.nodeType != minidom.Node.ELEMENT_NODE:
|
||||
continue
|
||||
if node.nodeName.lower() == 'assign':
|
||||
assignments.append(self.deserialize_assign(workflow, node))
|
||||
else:
|
||||
_exc('Unknown node: %s' % node.nodeName)
|
||||
return assignments
|
||||
|
||||
def deserialize_logical(self, node):
|
||||
"""
|
||||
Reads the logical tag from the given node, returns a Condition object.
|
||||
|
||||
node -- the xml node (xml.dom.minidom.Node)
|
||||
"""
|
||||
term1_attrib = node.getAttribute('left-field')
|
||||
term1_value = node.getAttribute('left-value')
|
||||
op = node.nodeName.lower()
|
||||
term2_attrib = node.getAttribute('right-field')
|
||||
term2_value = node.getAttribute('right-value')
|
||||
if op not in _op_map:
|
||||
_exc('Invalid operator')
|
||||
if term1_attrib != '' and term1_value != '':
|
||||
_exc('Both, left-field and left-value attributes found')
|
||||
elif term1_attrib == '' and term1_value == '':
|
||||
_exc('left-field or left-value attribute required')
|
||||
elif term1_value != '':
|
||||
left = term1_value
|
||||
else:
|
||||
left = operators.Attrib(term1_attrib)
|
||||
if term2_attrib != '' and term2_value != '':
|
||||
_exc('Both, right-field and right-value attributes found')
|
||||
elif term2_attrib == '' and term2_value == '':
|
||||
_exc('right-field or right-value attribute required')
|
||||
elif term2_value != '':
|
||||
right = term2_value
|
||||
else:
|
||||
right = operators.Attrib(term2_attrib)
|
||||
return _op_map[op](left, right)
|
||||
|
||||
def deserialize_condition(self, workflow, start_node):
|
||||
"""
|
||||
Reads the conditional statement from the given node.
|
||||
|
||||
workflow -- the workflow with which the concurrence is associated
|
||||
start_node -- the xml structure (xml.dom.minidom.Node)
|
||||
"""
|
||||
# Collect all information.
|
||||
condition = None
|
||||
spec_name = None
|
||||
for node in start_node.childNodes:
|
||||
if node.nodeType != minidom.Node.ELEMENT_NODE:
|
||||
continue
|
||||
if node.nodeName.lower() == 'successor':
|
||||
if spec_name is not None:
|
||||
_exc('Duplicate task name %s' % spec_name)
|
||||
if node.firstChild is None:
|
||||
_exc('Successor tag without a task name')
|
||||
spec_name = node.firstChild.nodeValue
|
||||
elif node.nodeName.lower() in _op_map:
|
||||
if condition is not None:
|
||||
_exc('Multiple conditions are not yet supported')
|
||||
condition = self.deserialize_logical(node)
|
||||
else:
|
||||
_exc('Unknown node: %s' % node.nodeName)
|
||||
|
||||
if condition is None:
|
||||
_exc('Missing condition in conditional statement')
|
||||
if spec_name is None:
|
||||
_exc('A %s has no task specified' % start_node.nodeName)
|
||||
return condition, spec_name
|
||||
|
||||
def deserialize_task_spec(self, workflow, start_node, read_specs):
|
||||
"""
|
||||
Reads the task from the given node and returns a tuple
|
||||
(start, end) that contains the stream of objects that model
|
||||
the behavior.
|
||||
|
||||
workflow -- the workflow with which the task is associated
|
||||
start_node -- the xml structure (xml.dom.minidom.Node)
|
||||
"""
|
||||
# Extract attributes from the node.
|
||||
nodetype = start_node.nodeName.lower()
|
||||
name = start_node.getAttribute('name').lower()
|
||||
context = start_node.getAttribute('context').lower()
|
||||
mutex = start_node.getAttribute('mutex').lower()
|
||||
cancel = start_node.getAttribute('cancel').lower()
|
||||
success = start_node.getAttribute('success').lower()
|
||||
times = start_node.getAttribute('times').lower()
|
||||
times_field = start_node.getAttribute('times-field').lower()
|
||||
threshold = start_node.getAttribute('threshold').lower()
|
||||
threshold_field = start_node.getAttribute('threshold-field').lower()
|
||||
file_name = start_node.getAttribute('file').lower()
|
||||
file_field = start_node.getAttribute('file-field').lower()
|
||||
kwargs = {'lock': [],
|
||||
'data': {},
|
||||
'defines': {},
|
||||
'pre_assign': [],
|
||||
'post_assign': []}
|
||||
if nodetype not in _spec_map:
|
||||
_exc('Invalid task type "%s"' % nodetype)
|
||||
if nodetype == 'start-task':
|
||||
name = 'start'
|
||||
if name == '':
|
||||
_exc('Invalid task name "%s"' % name)
|
||||
if name in read_specs:
|
||||
_exc('Duplicate task name "%s"' % name)
|
||||
if cancel != '' and cancel != '0':
|
||||
kwargs['cancel'] = True
|
||||
if success != '' and success != '0':
|
||||
kwargs['success'] = True
|
||||
if times != '':
|
||||
kwargs['times'] = int(times)
|
||||
if times_field != '':
|
||||
kwargs['times'] = operators.Attrib(times_field)
|
||||
if threshold != '':
|
||||
kwargs['threshold'] = int(threshold)
|
||||
if threshold_field != '':
|
||||
kwargs['threshold'] = operators.Attrib(threshold_field)
|
||||
if file_name != '':
|
||||
kwargs['file'] = file_name
|
||||
if file_field != '':
|
||||
kwargs['file'] = operators.Attrib(file_field)
|
||||
if nodetype == 'choose':
|
||||
kwargs['choice'] = []
|
||||
if nodetype == 'trigger':
|
||||
context = [context]
|
||||
if mutex != '':
|
||||
context = mutex
|
||||
|
||||
# Walk through the children of the node.
|
||||
successors = []
|
||||
for node in start_node.childNodes:
|
||||
if node.nodeType != minidom.Node.ELEMENT_NODE:
|
||||
continue
|
||||
if node.nodeName == 'description':
|
||||
kwargs['description'] = node.firstChild.nodeValue
|
||||
elif node.nodeName == 'successor' \
|
||||
or node.nodeName == 'default-successor':
|
||||
if node.firstChild is None:
|
||||
_exc('Empty %s tag' % node.nodeName)
|
||||
successors.append((None, node.firstChild.nodeValue))
|
||||
elif node.nodeName == 'conditional-successor':
|
||||
successors.append(self.deserialize_condition(workflow, node))
|
||||
elif node.nodeName == 'define':
|
||||
key, value = self.deserialize_data(workflow, node)
|
||||
kwargs['defines'][key] = value
|
||||
# "property" tag exists for backward compatibility.
|
||||
elif node.nodeName == 'data' or node.nodeName == 'property':
|
||||
key, value = self.deserialize_data(workflow, node)
|
||||
kwargs['data'][key] = value
|
||||
elif node.nodeName == 'pre-assign':
|
||||
kwargs['pre_assign'].append(
|
||||
self.deserialize_assign(workflow, node))
|
||||
elif node.nodeName == 'post-assign':
|
||||
kwargs['post_assign'].append(
|
||||
self.deserialize_assign(workflow, node))
|
||||
elif node.nodeName == 'in':
|
||||
kwargs['in_assign'] = self.deserialize_assign_list(
|
||||
workflow, node)
|
||||
elif node.nodeName == 'out':
|
||||
kwargs['out_assign'] = self.deserialize_assign_list(
|
||||
workflow, node)
|
||||
elif node.nodeName == 'cancel':
|
||||
if node.firstChild is None:
|
||||
_exc('Empty %s tag' % node.nodeName)
|
||||
if context == '':
|
||||
context = []
|
||||
elif not isinstance(context, list):
|
||||
context = [context]
|
||||
context.append(node.firstChild.nodeValue)
|
||||
elif node.nodeName == 'lock':
|
||||
if node.firstChild is None:
|
||||
_exc('Empty %s tag' % node.nodeName)
|
||||
kwargs['lock'].append(node.firstChild.nodeValue)
|
||||
elif node.nodeName == 'pick':
|
||||
if node.firstChild is None:
|
||||
_exc('Empty %s tag' % node.nodeName)
|
||||
kwargs['choice'].append(node.firstChild.nodeValue)
|
||||
else:
|
||||
_exc('Unknown node: %s' % node.nodeName)
|
||||
|
||||
# Create a new instance of the task spec.
|
||||
module = _spec_map[nodetype]
|
||||
if nodetype == 'start-task':
|
||||
spec = module(workflow, **kwargs)
|
||||
elif nodetype == 'multi-instance' or nodetype == 'thread-split':
|
||||
if times == '' and times_field == '':
|
||||
_exc('Missing "times" or "times-field" in "%s"' % name)
|
||||
elif times != '' and times_field != '':
|
||||
_exc('Both, "times" and "times-field" in "%s"' % name)
|
||||
spec = module(workflow, name, **kwargs)
|
||||
elif context == '':
|
||||
spec = module(workflow, name, **kwargs)
|
||||
else:
|
||||
spec = module(workflow, name, context, **kwargs)
|
||||
|
||||
read_specs[name] = spec, successors
|
||||
|
||||
def deserialize_workflow_spec(self, s_state, filename=None):
|
||||
"""
|
||||
Reads the workflow from the given XML structure and returns a
|
||||
WorkflowSpec instance.
|
||||
"""
|
||||
dom = minidom.parseString(s_state)
|
||||
node = dom.getElementsByTagName('process-definition')[0]
|
||||
name = node.getAttribute('name')
|
||||
if name == '':
|
||||
_exc('%s without a name attribute' % node.nodeName)
|
||||
|
||||
# Read all task specs and create a list of successors.
|
||||
workflow_spec = specs.WorkflowSpec(name, filename)
|
||||
del workflow_spec.task_specs['Start']
|
||||
end = specs.Simple(workflow_spec, 'End'), []
|
||||
read_specs = dict(end=end)
|
||||
for child_node in node.childNodes:
|
||||
if child_node.nodeType != minidom.Node.ELEMENT_NODE:
|
||||
continue
|
||||
if child_node.nodeName == 'name':
|
||||
workflow_spec.name = child_node.firstChild.nodeValue
|
||||
elif child_node.nodeName == 'description':
|
||||
workflow_spec.description = child_node.firstChild.nodeValue
|
||||
elif child_node.nodeName.lower() in _spec_map:
|
||||
self.deserialize_task_spec(
|
||||
workflow_spec, child_node, read_specs)
|
||||
else:
|
||||
_exc('Unknown node: %s' % child_node.nodeName)
|
||||
|
||||
        # Use the start-task read from the XML as the workflow's start spec.
|
||||
workflow_spec.start = read_specs['start'][0]
|
||||
|
||||
# Connect all task specs.
|
||||
for name in read_specs:
|
||||
spec, successors = read_specs[name]
|
||||
for condition, successor_name in successors:
|
||||
if successor_name not in read_specs:
|
||||
_exc('Unknown successor: "%s"' % successor_name)
|
||||
successor, foo = read_specs[successor_name]
|
||||
if condition is None:
|
||||
spec.connect(successor)
|
||||
else:
|
||||
spec.connect_if(condition, successor)
|
||||
return workflow_spec
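# A minimal usage sketch (assumes xml_text holds a <process-definition>
# document in the dialect this serializer reads):
#
#   spec = XmlSerializer().deserialize_workflow_spec(xml_text)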
|
|
@ -0,0 +1,810 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from builtins import str
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
import re
|
||||
import warnings
|
||||
from lxml import etree
|
||||
from lxml.etree import SubElement
|
||||
from ..workflow import Workflow
|
||||
from .. import specs, operators
|
||||
from ..task import Task, TaskStateNames
|
||||
from ..operators import (Attrib, Assign, PathAttrib, Equal, NotEqual,
|
||||
GreaterThan, LessThan, Match)
|
||||
from ..specs import (Cancel, AcquireMutex, CancelTask, Celery, Choose,
|
||||
ExclusiveChoice, Execute, Gate, Join, MultiChoice,
|
||||
MultiInstance, ReleaseMutex, Simple, WorkflowSpec,
|
||||
SubWorkflow, StartTask, ThreadMerge,
|
||||
ThreadSplit, ThreadStart, Merge, Trigger, LoopResetTask)
|
||||
from .base import Serializer
|
||||
from .exceptions import TaskNotSupportedError
|
||||
|
||||
# Create a list of tag names out of the spec names.
|
||||
_spec_map = dict()
|
||||
for name in dir(specs):
|
||||
if name.startswith('_'):
|
||||
continue
|
||||
module = specs.__dict__[name]
|
||||
name = re.sub(r'(.)([A-Z])', r'\1-\2', name).lower()
|
||||
_spec_map[name] = module
|
||||
_spec_map['task'] = specs.Simple
|
||||
|
||||
_op_map = {'equals': operators.Equal,
|
||||
'not-equals': operators.NotEqual,
|
||||
'less-than': operators.LessThan,
|
||||
'greater-than': operators.GreaterThan,
|
||||
'matches': operators.Match}
|
||||
|
||||
|
||||
class XmlSerializer(Serializer):
|
||||
|
||||
def serialize_attrib(self, op):
|
||||
"""
|
||||
Serializer for :meth:`SpiffWorkflow.operators.Attrib`.
|
||||
|
||||
Example::
|
||||
|
||||
<attribute>foobar</attribute>
|
||||
"""
|
||||
elem = etree.Element('attribute')
|
||||
elem.text = op.name
|
||||
return elem
|
||||
|
||||
def deserialize_attrib(self, elem):
|
||||
return Attrib(str(elem.text))
|
||||
|
||||
def serialize_pathattrib(self, op):
|
||||
"""
|
||||
Serializer for :meth:`SpiffWorkflow.operators.PathAttrib`.
|
||||
|
||||
Example::
|
||||
|
||||
<path>foobar</path>
|
||||
"""
|
||||
elem = etree.Element('path')
|
||||
elem.text = op.path
|
||||
return elem
|
||||
|
||||
def deserialize_pathattrib(self, elem):
|
||||
return PathAttrib(str(elem.text))
|
||||
|
||||
def serialize_assign(self, op):
|
||||
"""
|
||||
Serializer for :meth:`SpiffWorkflow.operators.Assign`.
|
||||
|
||||
Example::
|
||||
|
||||
<assign>
|
||||
<name>foobar</name>
|
||||
<value>doodle</value>
|
||||
</assign>
|
||||
"""
|
||||
elem = etree.Element('assign')
|
||||
self.serialize_value(SubElement(elem, 'name'), op.left_attribute)
|
||||
if op.right:
|
||||
self.serialize_value(SubElement(elem, 'value'), op.right)
|
||||
if op.right_attribute:
|
||||
self.serialize_value(
|
||||
SubElement(elem, 'value-attribute'), op.right_attribute)
|
||||
return elem
|
||||
|
||||
def deserialize_assign(self, elem):
|
||||
name = elem.findtext('name')
|
||||
value = elem.findtext('value')
|
||||
value_attribute = elem.findtext('value-attribute')
|
||||
return Assign(left_attribute=name,
|
||||
right_attribute=value_attribute,
|
||||
right=value)
|
||||
|
||||
def serialize_value(self, parent_elem, value):
|
||||
"""
|
||||
Serializes str, Attrib, or PathAttrib objects.
|
||||
|
||||
Example::
|
||||
|
||||
<attribute>foobar</attribute>
|
||||
"""
|
||||
if isinstance(value, (str, int)) or type(value).__name__ == 'str':
|
||||
parent_elem.text = str(value)
|
||||
elif value is None:
|
||||
parent_elem.text = None
|
||||
else:
|
||||
parent_elem.append(value.serialize(self))
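            # Attrib, PathAttrib and Assign all provide a serialize() method,
            # so anything that is not a plain string/int or None is delegated
            # to the object itself here.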
|
||||
|
||||
def deserialize_value(self, value_elem):
|
||||
value = value_elem.text
|
||||
if value is not None:
|
||||
return str(value)
|
||||
value = value_elem[0]
|
||||
if value.tag == 'attribute':
|
||||
return Attrib.deserialize(self, value)
|
||||
elif value.tag == 'path':
|
||||
return PathAttrib.deserialize(self, value)
|
||||
elif value.tag == 'assign':
|
||||
return Assign.deserialize(self, value)
|
||||
else:
|
||||
raise ValueError('unsupported tag:', value.tag)
|
||||
|
||||
def serialize_value_map(self, map_elem, thedict):
|
||||
"""
|
||||
Serializes a dictionary of key/value pairs, where the values are
|
||||
either strings, or Attrib, or PathAttrib objects.
|
||||
|
||||
Example::
|
||||
|
||||
<variable>
|
||||
<name>foo</name>
|
||||
<value>text</value>
|
||||
</variable>
|
||||
<variable>
|
||||
<name>foo2</name>
|
||||
<value><attribute>foobar</attribute></value>
|
||||
</variable>
|
||||
"""
|
||||
for key, value in sorted((str(k), v) for (k, v) in thedict.items()):
|
||||
var_elem = SubElement(map_elem, 'variable')
|
||||
SubElement(var_elem, 'name').text = str(key)
|
||||
value_elem = SubElement(var_elem, 'value')
|
||||
self.serialize_value(value_elem, value)
|
||||
return map_elem
|
||||
|
||||
def deserialize_value_map(self, map_elem):
|
||||
themap = {}
|
||||
for var_elem in map_elem:
|
||||
name = str(var_elem.find('name').text)
|
||||
value_elem = var_elem.find('value')
|
||||
themap[name] = self.deserialize_value(value_elem)
|
||||
return themap
|
||||
|
||||
def serialize_value_list(self, list_elem, thelist):
|
||||
"""
|
||||
Serializes a list, where the values are objects of type
|
||||
str, Attrib, or PathAttrib.
|
||||
|
||||
Example::
|
||||
|
||||
<value>text</value>
|
||||
<value><attribute>foobar</attribute></value>
|
||||
<value><path>foobar</path></value>
|
||||
"""
|
||||
for value in thelist:
|
||||
value_elem = SubElement(list_elem, 'value')
|
||||
self.serialize_value(value_elem, value)
|
||||
return list_elem
|
||||
|
||||
def deserialize_value_list(self, elem):
|
||||
thelist = []
|
||||
for value_elem in elem:
|
||||
thelist.append(self.deserialize_value(value_elem))
|
||||
return thelist
|
||||
|
||||
def serialize_operator_equal(self, op):
|
||||
"""
|
||||
Serializer for :meth:`SpiffWorkflow.operators.Equal`.
|
||||
|
||||
Example::
|
||||
|
||||
<equals>
|
||||
<value>text</value>
|
||||
<value><attribute>foobar</attribute></value>
|
||||
<value><path>foobar</path></value>
|
||||
</equals>
|
||||
"""
|
||||
elem = etree.Element('equals')
|
||||
return self.serialize_value_list(elem, op.args)
|
||||
|
||||
def deserialize_operator_equal(self, elem):
|
||||
return Equal(*self.deserialize_value_list(elem))
|
||||
|
||||
def serialize_operator_not_equal(self, op):
|
||||
"""
|
||||
Serializer for :meth:`SpiffWorkflow.operators.NotEqual`.
|
||||
|
||||
Example::
|
||||
|
||||
<not-equals>
|
||||
<value>text</value>
|
||||
<value><attribute>foobar</attribute></value>
|
||||
<value><path>foobar</path></value>
|
||||
</not-equals>
|
||||
"""
|
||||
elem = etree.Element('not-equals')
|
||||
return self.serialize_value_list(elem, op.args)
|
||||
|
||||
def deserialize_operator_not_equal(self, elem):
|
||||
return NotEqual(*self.deserialize_value_list(elem))
|
||||
|
||||
def serialize_operator_greater_than(self, op):
|
||||
"""
|
||||
        Serializer for :meth:`SpiffWorkflow.operators.GreaterThan`.
|
||||
|
||||
Example::
|
||||
|
||||
<greater-than>
|
||||
<value>text</value>
|
||||
<value><attribute>foobar</attribute></value>
|
||||
</greater-than>
|
||||
"""
|
||||
elem = etree.Element('greater-than')
|
||||
return self.serialize_value_list(elem, op.args)
|
||||
|
||||
def deserialize_operator_greater_than(self, elem):
|
||||
return GreaterThan(*self.deserialize_value_list(elem))
|
||||
|
||||
def serialize_operator_less_than(self, op):
|
||||
"""
|
||||
        Serializer for :meth:`SpiffWorkflow.operators.LessThan`.
|
||||
|
||||
Example::
|
||||
|
||||
<less-than>
|
||||
<value>text</value>
|
||||
<value><attribute>foobar</attribute></value>
|
||||
</less-than>
|
||||
"""
|
||||
elem = etree.Element('less-than')
|
||||
return self.serialize_value_list(elem, op.args)
|
||||
|
||||
def deserialize_operator_less_than(self, elem):
|
||||
return LessThan(*self.deserialize_value_list(elem))
|
||||
|
||||
def serialize_operator_match(self, op):
|
||||
"""
|
||||
        Serializer for :meth:`SpiffWorkflow.operators.Match`.
|
||||
|
||||
Example::
|
||||
|
||||
<matches>
|
||||
<value>text</value>
|
||||
<value><attribute>foobar</attribute></value>
|
||||
</matches>
|
||||
"""
|
||||
elem = etree.Element('matches')
|
||||
return self.serialize_value_list(elem, op.args)
|
||||
|
||||
def deserialize_operator_match(self, elem):
|
||||
return Match(*self.deserialize_value_list(elem))
|
||||
|
||||
def deserialize_operator(self, elem):
|
||||
cls = _op_map[elem.tag]
|
||||
return cls.deserialize(self, elem)
|
||||
|
||||
def serialize_task_spec(self, spec, elem):
|
||||
"""
|
||||
Serializes common attributes of :meth:`SpiffWorkflow.specs.TaskSpec`.
|
||||
"""
|
||||
if spec.id is not None:
|
||||
SubElement(elem, 'id').text = str(spec.id)
|
||||
SubElement(elem, 'name').text = spec.name
|
||||
if spec.description:
|
||||
SubElement(elem, 'description').text = spec.description
|
||||
if spec.manual:
|
||||
SubElement(elem, 'manual')
|
||||
if spec.internal:
|
||||
SubElement(elem, 'internal')
|
||||
SubElement(elem, 'lookahead').text = str(spec.lookahead)
|
||||
inputs = [t.name for t in spec.inputs]
|
||||
outputs = [t.name for t in spec.outputs]
|
||||
self.serialize_value_list(SubElement(elem, 'inputs'), inputs)
|
||||
self.serialize_value_list(SubElement(elem, 'outputs'), outputs)
|
||||
self.serialize_value_map(SubElement(elem, 'data'), spec.data)
|
||||
self.serialize_value_map(SubElement(elem, 'defines'), spec.defines)
|
||||
self.serialize_value_list(SubElement(elem, 'pre-assign'),
|
||||
spec.pre_assign)
|
||||
self.serialize_value_list(SubElement(elem, 'post-assign'),
|
||||
spec.post_assign)
|
||||
|
||||
# Note: Events are not serialized; this is documented in
|
||||
# the TaskSpec API docs.
|
||||
|
||||
return elem
|
||||
|
||||
def deserialize_task_spec(self, wf_spec, elem, spec_cls, **kwargs):
|
||||
name = elem.findtext('name')
|
||||
spec = spec_cls(wf_spec, name, **kwargs)
|
||||
theid = elem.findtext('id')
|
||||
spec.id = theid if theid is not None else None
|
||||
spec.description = elem.findtext('description', spec.description)
|
||||
spec.manual = elem.findtext('manual', spec.manual)
|
||||
spec.internal = elem.find('internal') is not None
|
||||
spec.lookahead = int(elem.findtext('lookahead', spec.lookahead))
|
||||
|
||||
data_elem = elem.find('data')
|
||||
if data_elem is not None:
|
||||
spec.data = self.deserialize_value_map(data_elem)
|
||||
defines_elem = elem.find('defines')
|
||||
if defines_elem is not None:
|
||||
spec.defines = self.deserialize_value_map(defines_elem)
|
||||
pre_assign_elem = elem.find('pre-assign')
|
||||
if pre_assign_elem is not None:
|
||||
spec.pre_assign = self.deserialize_value_list(pre_assign_elem)
|
||||
post_assign_elem = elem.find('post-assign')
|
||||
if post_assign_elem is not None:
|
||||
spec.post_assign = self.deserialize_value_list(post_assign_elem)
|
||||
|
||||
# We can't restore inputs and outputs yet because they may not be
|
||||
# deserialized yet. So keep the names, and resolve them in the
|
||||
# workflowspec deserializer.
|
||||
spec.inputs = self.deserialize_value_list(elem.find('inputs'))
|
||||
spec.outputs = self.deserialize_value_list(elem.find('outputs'))
|
||||
|
||||
return spec
|
||||
|
||||
def serialize_acquire_mutex(self, spec):
|
||||
"""
|
||||
Serializer for :meth:`SpiffWorkflow.specs.AcquireMutex`.
|
||||
"""
|
||||
elem = etree.Element('acquire-mutex')
|
||||
self.serialize_task_spec(spec, elem)
|
||||
SubElement(elem, 'mutex').text = spec.mutex
|
||||
return elem
|
||||
|
||||
def deserialize_acquire_mutex(self, wf_spec, elem, cls=AcquireMutex,
|
||||
**kwargs):
|
||||
mutex = elem.findtext('mutex')
|
||||
return self.deserialize_task_spec(wf_spec,
|
||||
elem,
|
||||
cls,
|
||||
mutex=mutex,
|
||||
**kwargs)
|
||||
|
||||
def serialize_cancel(self, spec):
|
||||
elem = etree.Element('cancel')
|
||||
self.serialize_task_spec(spec, elem)
|
||||
        if spec.cancel_successfully:
            SubElement(elem, 'cancel-successfully')
|
||||
return elem
|
||||
|
||||
def deserialize_cancel(self, wf_spec, elem, cls=Cancel,
|
||||
**kwargs):
|
||||
success = elem.find('cancel-successfully') is not None
|
||||
return self.deserialize_task_spec(wf_spec,
|
||||
elem,
|
||||
cls,
|
||||
success=success,
|
||||
**kwargs)
|
||||
|
||||
def serialize_cancel_task(self, spec):
|
||||
elem = etree.Element('cancel-task')
|
||||
return self.serialize_trigger(spec, elem)
|
||||
|
||||
def deserialize_cancel_task(self, wf_spec, elem, cls=CancelTask, **kwargs):
|
||||
return self.deserialize_trigger(wf_spec, elem, cls, **kwargs)
|
||||
|
||||
def serialize_celery(self, spec, elem=None):
|
||||
if elem is None:
|
||||
elem = etree.Element('celery')
|
||||
|
||||
SubElement(elem, 'call').text = spec.call
|
||||
args_elem = SubElement(elem, 'args')
|
||||
self.serialize_value_list(args_elem, spec.args)
|
||||
kwargs_elem = SubElement(elem, 'kwargs')
|
||||
self.serialize_value_map(kwargs_elem, spec.kwargs)
|
||||
if spec.merge_results:
|
||||
SubElement(elem, 'merge-results')
|
||||
SubElement(elem, 'result-key').text = spec.result_key
|
||||
|
||||
return self.serialize_task_spec(spec, elem)
|
||||
|
||||
def deserialize_celery(self, wf_spec, elem, cls=Celery, **kwargs):
|
||||
call = elem.findtext('call')
|
||||
args = self.deserialize_value_list(elem.find('args'))
|
||||
        result_key = elem.findtext('result-key')
|
||||
merge_results = elem.find('merge-results') is not None
|
||||
spec = self.deserialize_task_spec(wf_spec,
|
||||
elem,
|
||||
cls,
|
||||
call=call,
|
||||
call_args=args,
|
||||
result_key=result_key,
|
||||
merge_results=merge_results,
|
||||
**kwargs)
|
||||
spec.kwargs = self.deserialize_value_map(elem.find('kwargs'))
|
||||
return spec
|
||||
|
||||
def serialize_choose(self, spec, elem=None):
|
||||
if elem is None:
|
||||
elem = etree.Element('choose')
|
||||
elem = self.serialize_task_spec(spec, elem)
|
||||
SubElement(elem, 'context').text = spec.context
|
||||
choice_elem = SubElement(elem, 'choice')
|
||||
self.serialize_value_list(choice_elem, spec.choice)
|
||||
return elem
|
||||
|
||||
def deserialize_choose(self, wf_spec, elem, cls=Choose, **kwargs):
|
||||
choice = self.deserialize_value_list(elem.find('choice'))
|
||||
context = elem.findtext('context')
|
||||
return self.deserialize_task_spec(wf_spec, elem, cls, choice=choice,
|
||||
context=context, **kwargs)
|
||||
|
||||
def serialize_exclusive_choice(self, spec, elem=None):
|
||||
if elem is None:
|
||||
elem = etree.Element('exclusive-choice')
|
||||
self.serialize_multi_choice(spec, elem)
|
||||
SubElement(elem, 'default_task_spec').text = spec.default_task_spec
|
||||
return elem
|
||||
|
||||
def deserialize_exclusive_choice(self, wf_spec, elem, cls=ExclusiveChoice,
|
||||
**kwargs):
|
||||
spec = self.deserialize_multi_choice(wf_spec, elem, cls, **kwargs)
|
||||
spec.default_task_spec = elem.findtext('default_task_spec')
|
||||
return spec
|
||||
|
||||
def serialize_execute(self, spec, elem=None):
|
||||
if elem is None:
|
||||
elem = etree.Element('execute')
|
||||
self.serialize_value_list(SubElement(elem, 'args'), spec.args)
|
||||
return self.serialize_task_spec(spec, elem)
|
||||
|
||||
def deserialize_execute(self, wf_spec, elem, cls=Execute, **kwargs):
|
||||
args = self.deserialize_value_list(elem.find('args'))
|
||||
return self.deserialize_task_spec(wf_spec, elem, cls, args=args,
|
||||
**kwargs)
|
||||
|
||||
def serialize_gate(self, spec, elem=None):
|
||||
if elem is None:
|
||||
elem = etree.Element('gate')
|
||||
SubElement(elem, 'context').text = spec.context
|
||||
return self.serialize_task_spec(spec, elem)
|
||||
|
||||
def deserialize_gate(self, wf_spec, elem, cls=Gate, **kwargs):
|
||||
context = elem.findtext('context')
|
||||
return self.deserialize_task_spec(wf_spec, elem, cls, context=context,
|
||||
**kwargs)
|
||||
|
||||
def serialize_join(self, spec, elem=None):
|
||||
if elem is None:
|
||||
elem = etree.Element('join')
|
||||
if spec.split_task:
|
||||
SubElement(elem, 'split-task').text = spec.split_task
|
||||
if spec.threshold is not None:
|
||||
self.serialize_value(SubElement(elem, 'threshold'), spec.threshold)
|
||||
if spec.cancel_remaining:
|
||||
SubElement(elem, 'cancel-remaining')
|
||||
return self.serialize_task_spec(spec, elem)
|
||||
|
||||
def deserialize_join(self, wf_spec, elem, cls=Join, **kwargs):
|
||||
split_task = elem.findtext('split-task')
|
||||
if elem.find('threshold') is None:
|
||||
threshold = None
|
||||
else:
|
||||
threshold = self.deserialize_value(elem.find('threshold'))
|
||||
cancel = elem.find('cancel-remaining') is not None
|
||||
return self.deserialize_task_spec(wf_spec, elem, cls,
|
||||
split_task=split_task,
|
||||
threshold=threshold,
|
||||
cancel=cancel,
|
||||
**kwargs)
|
||||
|
||||
def serialize_multi_choice(self, spec, elem=None):
|
||||
if elem is None:
|
||||
elem = etree.Element('multi-choice')
|
||||
if spec.choice:
|
||||
self.serialize_value_list(SubElement(elem, 'choice'), spec.choice)
|
||||
options = SubElement(elem, 'options')
|
||||
for condition, spec_name in spec.cond_task_specs:
|
||||
option_elem = SubElement(options, 'option')
|
||||
if condition is not None:
|
||||
cond_elem = SubElement(option_elem, 'condition')
|
||||
cond_elem.append(condition.serialize(self))
|
||||
SubElement(option_elem, 'output').text = spec_name
|
||||
return self.serialize_task_spec(spec, elem)
|
||||
|
||||
def deserialize_multi_choice(self, wf_spec, elem, cls=MultiChoice,
|
||||
**kwargs):
|
||||
spec = self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)
|
||||
if elem.find('choice') is not None:
|
||||
spec.choice = self.deserialize_value_list(elem.find('choice'))
|
||||
if elem.find('options') is not None:
|
||||
for option_elem in elem.find('options'):
|
||||
condition_elem = option_elem.find('condition')
|
||||
if condition_elem is not None:
|
||||
condition = self.deserialize_operator(condition_elem[0])
|
||||
else:
|
||||
condition = None
|
||||
spec_name = option_elem.findtext('output')
|
||||
spec.cond_task_specs.append((condition, spec_name))
|
||||
return spec
|
||||
|
||||
def serialize_multi_instance(self, spec):
|
||||
elem = etree.Element('multi-instance')
|
||||
self.serialize_value(SubElement(elem, 'times'), spec.times)
|
||||
return self.serialize_task_spec(spec, elem)
|
||||
|
||||
def deserialize_multi_instance(self, wf_spec, elem, cls=None,
|
||||
**kwargs):
|
||||
        if cls is None:
|
||||
cls = MultiInstance
|
||||
#cls = MultiInstance(wf_spec,elem.find('name'),elem.find('times'))
|
||||
times = self.deserialize_value(elem.find('times'))
|
||||
return self.deserialize_task_spec(wf_spec, elem, cls, times=times,
|
||||
**kwargs)
|
||||
|
||||
    def serialize_release_mutex(self, spec):
        elem = etree.Element('release-mutex')
        SubElement(elem, 'mutex').text = spec.mutex
        return self.serialize_task_spec(spec, elem)

    def deserialize_release_mutex(self, wf_spec, elem, cls=ReleaseMutex,
                                  **kwargs):
        mutex = elem.findtext('mutex')
        return self.deserialize_task_spec(wf_spec, elem, cls, mutex=mutex,
                                          **kwargs)

    def serialize_simple(self, spec):
        elem = etree.Element('simple')
        return self.serialize_task_spec(spec, elem)

    def deserialize_simple(self, wf_spec, elem, cls=Simple, **kwargs):
        return self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)

    def serialize_start_task(self, spec):
        elem = etree.Element('start-task')
        return self.serialize_task_spec(spec, elem)

    def deserialize_start_task(self, wf_spec, elem, cls=StartTask, **kwargs):
        return self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)
    def serialize_sub_workflow(self, spec):
        warnings.warn("SubWorkflows cannot be safely serialized as they only "
                      "store a reference to the subworkflow specification "
                      "as a path to an external XML file.")
        elem = etree.Element('sub-workflow')
        SubElement(elem, 'filename').text = spec.file
        in_elem = SubElement(elem, 'in-assign')
        self.serialize_value_list(in_elem, spec.in_assign)
        out_elem = SubElement(elem, 'out-assign')
        self.serialize_value_list(out_elem, spec.out_assign)
        return self.serialize_task_spec(spec, elem)

    def deserialize_sub_workflow(self, wf_spec, elem, cls=SubWorkflow,
                                 **kwargs):
        warnings.warn("SubWorkflows cannot be safely deserialized as they "
                      "only store a reference to the subworkflow "
                      "specification as a path to an external XML file.")
        filename = elem.findtext('filename')
        in_elem = elem.find('in-assign')
        in_assign = self.deserialize_value_list(in_elem)
        out_elem = elem.find('out-assign')
        out_assign = self.deserialize_value_list(out_elem)
        return self.deserialize_task_spec(wf_spec, elem, cls, file=filename,
                                          in_assign=in_assign,
                                          out_assign=out_assign, **kwargs)
    def serialize_thread_merge(self, spec, elem=None):
        if elem is None:
            elem = etree.Element('thread-merge')
        return self.serialize_join(spec, elem)

    def deserialize_thread_merge(self, wf_spec, elem, cls=ThreadMerge,
                                 **kwargs):
        return self.deserialize_join(wf_spec, elem, cls, **kwargs)

    def serialize_thread_split(self, spec, elem=None):
        if elem is None:
            elem = etree.Element('thread-split')
        self.serialize_value(SubElement(elem, 'times'), spec.times)
        return self.serialize_task_spec(spec, elem)

    def deserialize_thread_split(self, wf_spec, elem, cls=ThreadSplit,
                                 **kwargs):
        times_elem = elem.find('times')
        if times_elem is not None:
            times = self.deserialize_value(times_elem)
        else:
            times = 1
        return self.deserialize_task_spec(wf_spec, elem, cls, times=times,
                                          suppress_threadstart_creation=True,
                                          **kwargs)

    def serialize_thread_start(self, spec, elem=None):
        if elem is None:
            elem = etree.Element('thread-start')
        return self.serialize_task_spec(spec, elem)

    def deserialize_thread_start(self, wf_spec, elem, cls=ThreadStart,
                                 **kwargs):
        return self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)
    def serialize_merge(self, spec, elem=None):
        if elem is None:
            elem = etree.Element('merge')
        SubElement(elem, 'split-task').text = spec.split_task
        return self.serialize_task_spec(spec, elem)

    def deserialize_merge(self, wf_spec, elem, cls=Merge, **kwargs):
        split_task = elem.findtext('split-task')
        return self.deserialize_task_spec(wf_spec, elem, cls,
                                          split_task=split_task, **kwargs)

    def serialize_trigger(self, spec, elem=None):
        if elem is None:
            elem = etree.Element('trigger')
        self.serialize_value_list(SubElement(elem, 'context'), spec.context)
        self.serialize_value(SubElement(elem, 'times'), spec.times)
        SubElement(elem, 'queued').text = str(spec.queued)
        return self.serialize_task_spec(spec, elem)

    def deserialize_trigger(self, wf_spec, elem, cls=Trigger, **kwargs):
        context = self.deserialize_value_list(elem.find('context'))
        times = self.deserialize_value(elem.find('times'))
        spec = self.deserialize_task_spec(wf_spec, elem, cls, context=context,
                                          times=times, **kwargs)
        try:
            spec.queued = int(elem.findtext('queued'))
        except ValueError:
            pass
        return spec
    def serialize_workflow_spec(self, spec, **kwargs):
        wf_elem = etree.Element('workflow')
        SubElement(wf_elem, 'name').text = spec.name
        SubElement(wf_elem, 'description').text = spec.description
        if spec.file:
            SubElement(wf_elem, 'filename').text = spec.file
        tasks_elem = SubElement(wf_elem, 'tasks')
        for task_name, task_spec in sorted(spec.task_specs.items()):
            tasks_elem.append(task_spec.serialize(self))
        return wf_elem

    def deserialize_workflow_spec(self, elem, **kwargs):
        name = elem.findtext('name')
        filename = elem.findtext('filename')
        spec = WorkflowSpec(name, filename=filename, nostart=True)
        spec.description = elem.findtext('description')

        # Add all tasks.
        tasks_elem = elem.find('tasks')
        for task_elem in tasks_elem:
            cls = _spec_map[task_elem.tag]
            task_spec = cls.deserialize(self, spec, task_elem)
            spec.task_specs[task_spec.name] = task_spec
        spec.start = spec.task_specs['Start']

        # Connect the tasks.
        for name, task_spec in list(spec.task_specs.items()):
            task_spec.inputs = [spec.get_task_spec_from_name(t)
                                for t in task_spec.inputs]
            task_spec.outputs = [spec.get_task_spec_from_name(t)
                                 for t in task_spec.outputs]
        return spec
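    # A minimal round-trip sketch for the two methods above. It assumes this
    # serializer class is instantiated directly (the class name XmlSerializer
    # is an assumption) and that `etree` is lxml's etree; adjust to your setup.
    #
    #   serializer = XmlSerializer()
    #   elem = serializer.serialize_workflow_spec(spec)     # -> etree Element
    #   text = etree.tostring(elem, pretty_print=True)
    #   restored = serializer.deserialize_workflow_spec(etree.fromstring(text))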
    def serialize_workflow(self, workflow, **kwargs):
        assert isinstance(workflow, Workflow)
        elem = etree.Element('workflow')
        wf_spec_elem = self.serialize_workflow_spec(workflow.spec)
        wf_spec_elem.tag = 'spec'
        elem.append(wf_spec_elem)

        data_elem = SubElement(elem, 'data')
        self.serialize_value_map(data_elem, workflow.data)

        if workflow.last_task is not None:
            SubElement(elem, 'last-task').text = str(workflow.last_task.id)

        # outer_workflow
        # SubElement(elem, 'outer-workflow').text = workflow.outer_workflow.id

        if workflow.success:
            SubElement(elem, 'success')
        task_tree_elem = SubElement(elem, 'task-tree')
        task_tree_elem.append(self.serialize_task(workflow.task_tree))

        return elem

    def deserialize_workflow(self, elem, **kwargs):
        wf_spec_elem = elem.find('spec')
        wf_spec = self.deserialize_workflow_spec(wf_spec_elem, **kwargs)
        workflow = Workflow(wf_spec)

        workflow.data = self.deserialize_value_map(elem.find('data'))
        workflow.success = elem.find('success') is not None

        # outer_workflow
        # workflow.outer_workflow =
        #     find_workflow_by_id(remap_workflow_id(elem['outer_workflow']))

        task_tree_elem = elem.find('task-tree')
        workflow.task_tree = self.deserialize_task(workflow, task_tree_elem[0])

        # Re-connect parents.
        for task in workflow.get_tasks():
            task.parent = workflow.get_task(task.parent)

        # last_task
        last_task = elem.findtext('last-task')
        if last_task is not None:
            workflow.last_task = workflow.get_task(last_task)

        return workflow
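    # Workflow-level round trip (hedged sketch; same assumptions as above).
    # Note that the spec is embedded under <spec>, so the workflow can be
    # rebuilt from the element alone, without re-reading external files:
    #
    #   elem = serializer.serialize_workflow(workflow)
    #   copy = serializer.deserialize_workflow(elem)   # spec, data, task tree restored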
    def serialize_loop_reset_task(self, spec):
        elem = etree.Element('loop-reset-task')
        SubElement(elem, 'destination_id').text = str(spec.destination_id)
        SubElement(elem, 'destination_spec_name').text = str(spec.destination_spec_name)
        return self.serialize_task_spec(spec, elem)

    def deserialize_loop_reset_task(self, wf_spec, elem, cls=LoopResetTask,
                                    **kwargs):
        destination_id = elem.findtext('destination_id')
        destination_spec_name = elem.findtext('destination_spec_name')

        task = self.deserialize_task_spec(wf_spec, elem, cls,
                                          destination_id=destination_id,
                                          destination_spec_name=destination_spec_name)
        return task
    def serialize_task(self, task, skip_children=False):
        assert isinstance(task, Task)

        if isinstance(task.task_spec, SubWorkflow):
            raise TaskNotSupportedError(
                "Subworkflow tasks cannot be serialized (due to their use of"
                " internal_data to store the subworkflow).")

        # We are not serializing task.workflow; the deserializer accepts
        # an already-deserialized Workflow instead.
        elem = etree.Element('task')
        if task.id is not None:
            SubElement(elem, 'id').text = str(task.id)
        if task.parent is not None:
            SubElement(elem, 'parent').text = str(task.parent.id)

        if not skip_children:
            children_elem = SubElement(elem, 'children')
            for child in task.children:
                child_elem = self.serialize_task(child)
                children_elem.append(child_elem)

        SubElement(elem, 'state').text = task.get_state_name()
        if task.triggered:
            SubElement(elem, 'triggered')
        SubElement(elem, 'spec').text = task.task_spec.name
        SubElement(elem, 'last-state-change').text = str(
            task.last_state_change)
        self.serialize_value_map(SubElement(elem, 'data'), task.data)
        internal_data_elem = SubElement(elem, 'internal-data')
        self.serialize_value_map(internal_data_elem, task.internal_data)

        return elem

    def deserialize_task(self, workflow, elem):
        assert isinstance(workflow, Workflow)

        task_spec_name = elem.findtext('spec')
        task_spec = workflow.get_task_spec_from_name(task_spec_name)
        task = Task(workflow, task_spec)
        task.id = elem.findtext('id')
        # The parent is later resolved by the workflow deserializer.
        task.parent = elem.findtext('parent')

        for child_elem in elem.find('children'):
            child_task = self.deserialize_task(workflow, child_elem)
            task.children.append(child_task)

        state_name = elem.findtext('state')
        found = False
        for key, value in list(TaskStateNames.items()):
            if value == state_name:
                task._state = key
                found = True
                break
        assert found
        task.triggered = elem.find('triggered') is not None
        task.last_state_change = float(elem.findtext('last-state-change'))
        task.data = self.deserialize_value_map(elem.find('data'))
        internal_data_elem = elem.find('internal-data')
        task.internal_data = self.deserialize_value_map(internal_data_elem)

        return task
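    # Illustrative shape of a serialized task (element names taken from
    # serialize_task above; values are placeholders, <children> holds the
    # recursively serialized child tasks, and <triggered/> is only present
    # when task.triggered is true):
    #
    #   <task>
    #     <id>...</id>
    #     <parent>...</parent>
    #     <children>...</children>
    #     <state>COMPLETED</state>
    #     <triggered/>
    #     <spec>task_spec_name</spec>
    #     <last-state-change>1650000000.0</last-state-change>
    #     <data>...</data>
    #     <internal-data>...</internal-data>
    #   </task>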

@ -0,0 +1 @@
from .tasks import CallActivityParser

@ -0,0 +1,20 @@
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.parser.util import xpath_eval


class SignavioBpmnParser(BpmnParser):

    def add_bpmn_xml(self, bpmn, filename=None):
        # Signavio sometimes disconnects a BoundaryEvent from its owning task.
        # It then shows up as an intermediateCatchEvent without any incoming
        # sequence flows. Check for this case before parsing the XML.
        xpath = xpath_eval(bpmn)
        for catch_event in xpath('.//bpmn:intermediateCatchEvent'):
            incoming = xpath('.//bpmn:sequenceFlow[@targetRef="%s"]'
                             % catch_event.get('id'))
            if not incoming:
                raise ValidationException(
                    'Intermediate Catch Event has no incoming sequences. '
                    'This might be a Boundary Event that has been '
                    'disconnected.',
                    node=catch_event, filename=filename)
        return super().add_bpmn_xml(bpmn, filename)
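# A hedged usage sketch: it assumes this subclass keeps the base BpmnParser
# API (add_bpmn_file / get_spec) and uses a hypothetical file and process name;
# adjust to your setup.
#
#   parser = SignavioBpmnParser()
#   parser.add_bpmn_file('process.bpmn')   # runs the BoundaryEvent check above
#   spec = parser.get_spec('My Process')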

@ -0,0 +1,45 @@
from SpiffWorkflow.bpmn.parser.task_parsers import TaskParser
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.parser.util import one, xpath_eval


SIGNAVIO_NS = 'http://www.signavio.com'


class CallActivityParser(TaskParser):
    """Parses a CallActivity node."""

    def create_task(self):
        subworkflow_spec = self.get_subprocess_spec()
        return self.spec_class(
            self.spec, self.get_task_spec_name(), subworkflow_spec,
            lane=self.lane,
            position=self.position,
            description=self.node.get('name', None))

    def get_subprocess_spec(self):
        called_element = self.node.get('calledElement', None) or self._fix_call_activities()
        parser = self.process_parser.parser.get_process_parser(called_element)
        if parser is None:
            raise ValidationException(
                f"The process '{called_element}' was not found. Did you mean one of the following: "
                f"{', '.join(self.process_parser.parser.get_process_ids())}?",
                node=self.node,
                filename=self.process_parser.filename)
        return parser.get_id()

    def _fix_call_activities(self):
        """
        Signavio produces slightly invalid BPMN for call activity nodes: it
        is supposed to put a reference to the id of the called process into
        the calledElement attribute, but instead it stores a string (the
        name of the process, not its ID, in our interpretation) in an
        extension tag.
        """
        signavio_meta_data = xpath_eval(self.node, extra_ns={
            'signavio': SIGNAVIO_NS})(
            './/signavio:signavioMetaData[@metaKey="entry"]')
        if not signavio_meta_data:
            raise ValidationException(
                'No Signavio "Subprocess reference" specified.',
                node=self.node, filename=self.filename)
        return one(signavio_meta_data).get('metaValue')
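# For illustration, the extension element that _fix_call_activities matches
# looks roughly like this in Signavio output (metaValue carries the name of
# the called process; the prefix is bound to http://www.signavio.com; the
# surrounding extensionElements wrapper and the process name are assumptions):
#
#   <bpmn:extensionElements>
#     <signavio:signavioMetaData metaKey="entry" metaValue="My Called Process"/>
#   </bpmn:extensionElements>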