Feature/background proc with celery (#788)
* WIP: some initial test code to test out celery w/ burnettk
* some cleanup for celery and added base model to put tasks waiting on timers
* removed dup bpmn file
* some more cleanup and added strategy to queue instructions
* some minor code changes w/ burnettk
* remove the unused next_task key from api calls since nobody uses it w/ burnettk essweine
* added migration for future tasks and added test to make sure we are inserting into it w/ burnettk essweine
* ensure future task run at time can be updated w/ burnettk
* added table to queue instructions for end users w/ burnettk
* added test to ensure we are storing instructions for end users w/ burnettk
* added progress page to display new instructions to user
* ignore dup instructions on db insert w/ burnettk
* some more updates for celery w/ burnettk
* some pyl and test fixes w/ burnettk
* fixed tests w/ burnettk
* WIP: added page to show instructions on pi show page w/ burnettk
* pi show page now fully replaces the interstitial page w/ burnettk
* fixed broken test w/ burnettk
* moved background processing items to own module w/ burnettk
* fixed apscheduler start script
* updated celery task queue to handle future tasks and upgraded black and set its line-length to match ruff w/ burnettk
* added support to run future tasks using countdown w/ burnettk
* build image for celery branch w/ burnettk
* poet does not exist in the image w/ burnettk
* start blocking scheduler should always start the scheduler w/ burnettk
* add init and stuff for this branch
* make this work not just on my mac
* send other args to only
* added running status for process instance and use that on fe to go to show page and added additional identifier to locking system to isolate celery workers better w/ burnettk
* fixed typing error that typeguard found, not sure why mypy did not w/ burnettk
* do not check for no instructions on interstitial page for cypress tests on frontend w/ burnettk
* do not queue process instances twice w/ burnettk
* removed bad file w/ burnettk
* queue tasks using strings to avoid circular imports when attempting to queue w/ burnettk
* only queue imminent new timer events and mock celery
* some keyboard shortcut support on frontend and added ability to force run a process instance over the api w/ burnettk
* some styles added for the shortcut menu w/ burnettk
* pyl w/ burnettk
* fixed test w/ burnettk
* removed temporary celery script and added support for celery worker in run server locally w/ burnettk
* cleaned up migrations w/ burnettk
* created new migration to clean up old migrations

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
Co-authored-by: burnettk <burnettk@users.noreply.github.com>
This commit is contained in:
parent 8d72ef5cfd
commit 18600189c8
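The queuing approach described in the commit notes -- queue tasks by string name so callers never import the task module (avoiding the circular imports mentioned above), and hand only imminent timer events to Celery using its countdown option -- can be sketched roughly as follows. The broker URL, task name, and imminence threshold here are illustrative assumptions, not the project's actual values.

    import time

    from celery import Celery

    # Assumed broker URL and app name; the real application wires these up from config.
    celery_app = Celery("spiffworkflow_backend", broker="redis://localhost:6379/0")

    # Timer events further out than this stay in the future_task table for a later pass.
    IMMINENT_SECONDS = 300

    def queue_future_task(task_guid: str, run_at_in_seconds: int) -> None:
        delay = run_at_in_seconds - int(time.time())
        if delay > IMMINENT_SECONDS:
            return  # not imminent; leave it for the background scheduler to pick up later
        # send_task queues by task name (a string), so this module never imports
        # the task function itself and therefore avoids circular imports.
        celery_app.send_task(
            "spiffworkflow_backend.celery_tasks.process_instance_task",  # assumed name
            kwargs={"task_guid": task_guid},
            countdown=max(delay, 0),
        )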
@@ -32,7 +32,7 @@ on:
 branches:
 - main
 - spiffdemo
-- feature/no-data-for-finished-spiff-tasks
+- feature/background-proc-with-celery

 jobs:
 create_frontend_docker_image:
@@ -18,7 +18,7 @@ repos:
 # --line-length because then we can avoid the fancy line wrapping in more instances and jason, kb, and elizabeth
 # kind of prefer long lines rather than cutely-formatted sets of lines.
 # TODO: enable when its safe to update the files
-args: [--preview, --line-length, "119"]
+args: [--preview, --line-length, "130"]

 - id: check-added-large-files
   files: ^spiffworkflow-backend/
@@ -1,10 +1,9 @@
-# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.

 [[package]]
 name = "attrs"
 version = "22.2.0"
 description = "Classes Without Boilerplate"
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -23,7 +22,6 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy
 name = "bandit"
 version = "1.7.2"
 description = "Security oriented static analyser for python code."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -46,7 +44,6 @@ yaml = ["PyYAML"]
 name = "black"
 version = "23.1.0"
 description = "The uncompromising code formatter."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -94,7 +91,6 @@ uvloop = ["uvloop (>=0.15.2)"]
 name = "cfgv"
 version = "3.3.1"
 description = "Validate configuration and produce human readable error messages."
-category = "dev"
 optional = false
 python-versions = ">=3.6.1"
 files = [
@@ -106,7 +102,6 @@ files = [
 name = "classify-imports"
 version = "4.2.0"
 description = "Utilities for refactoring imports in python-like syntax."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -118,7 +113,6 @@ files = [
 name = "click"
 version = "8.1.3"
 description = "Composable command line interface toolkit"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -133,7 +127,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
 name = "colorama"
 version = "0.4.6"
 description = "Cross-platform colored terminal text."
-category = "dev"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
 files = [
@@ -145,7 +138,6 @@ files = [
 name = "distlib"
 version = "0.3.6"
 description = "Distribution utilities"
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -157,7 +149,6 @@ files = [
 name = "docutils"
 version = "0.19"
 description = "Docutils -- Python Documentation Utilities"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -169,7 +160,6 @@ files = [
 name = "filelock"
 version = "3.10.7"
 description = "A platform independent file lock."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -185,7 +175,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "p
 name = "flake8"
 version = "4.0.1"
 description = "the modular source code checker: pep8 pyflakes and co"
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -202,7 +191,6 @@ pyflakes = ">=2.4.0,<2.5.0"
 name = "flake8-bandit"
 version = "2.1.2"
 description = "Automated security testing with bandit and flake8."
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -219,7 +207,6 @@ pycodestyle = "*"
 name = "flake8-bugbear"
 version = "22.12.6"
 description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -238,7 +225,6 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"]
 name = "flake8-docstrings"
 version = "1.7.0"
 description = "Extension for flake8 which uses pydocstyle to check docstrings"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -254,7 +240,6 @@ pydocstyle = ">=2.1"
 name = "flake8-polyfill"
 version = "1.0.2"
 description = "Polyfill package for Flake8 plugins"
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -269,7 +254,6 @@ flake8 = "*"
 name = "flake8-rst-docstrings"
 version = "0.2.7"
 description = "Python docstring reStructuredText (RST) validator"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -286,7 +270,6 @@ restructuredtext-lint = "*"
 name = "gitdb"
 version = "4.0.10"
 description = "Git Object Database"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -301,7 +284,6 @@ smmap = ">=3.0.1,<6"
 name = "gitpython"
 version = "3.1.36"
 description = "GitPython is a Python library used to interact with Git repositories"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -319,7 +301,6 @@ test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit"
 name = "identify"
 version = "2.5.22"
 description = "File identification library for Python"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -334,7 +315,6 @@ license = ["ukkonen"]
 name = "mccabe"
 version = "0.6.1"
 description = "McCabe checker, plugin for flake8"
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -346,7 +326,6 @@ files = [
 name = "mypy-extensions"
 version = "1.0.0"
 description = "Type system extensions for programs checked with the mypy type checker."
-category = "dev"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -358,7 +337,6 @@ files = [
 name = "nodeenv"
 version = "1.7.0"
 description = "Node.js virtual environment builder"
-category = "dev"
 optional = false
 python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
 files = [
@@ -373,7 +351,6 @@ setuptools = "*"
 name = "packaging"
 version = "23.0"
 description = "Core utilities for Python packages"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -385,7 +362,6 @@ files = [
 name = "pathspec"
 version = "0.11.1"
 description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -397,7 +373,6 @@ files = [
 name = "pbr"
 version = "5.11.1"
 description = "Python Build Reasonableness"
-category = "dev"
 optional = false
 python-versions = ">=2.6"
 files = [
@@ -409,7 +384,6 @@ files = [
 name = "platformdirs"
 version = "3.2.0"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -425,7 +399,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-
 name = "pre-commit"
 version = "2.21.0"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -444,7 +417,6 @@ virtualenv = ">=20.10.0"
 name = "pre-commit-hooks"
 version = "4.4.0"
 description = "Some out-of-the-box hooks for pre-commit."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -459,7 +431,6 @@ files = [
 name = "pycodestyle"
 version = "2.8.0"
 description = "Python style guide checker"
-category = "dev"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
 files = [
@@ -471,7 +442,6 @@ files = [
 name = "pydocstyle"
 version = "6.3.0"
 description = "Python docstring style checker"
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -489,7 +459,6 @@ toml = ["tomli (>=1.2.3)"]
 name = "pyflakes"
 version = "2.4.0"
 description = "passive checker of Python programs"
-category = "dev"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
@@ -501,7 +470,6 @@ files = [
 name = "pygments"
 version = "2.14.0"
 description = "Pygments is a syntax highlighting package written in Python."
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -516,7 +484,6 @@ plugins = ["importlib-metadata"]
 name = "pyupgrade"
 version = "3.3.1"
 description = "A tool to automatically upgrade syntax for newer versions."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -531,7 +498,6 @@ tokenize-rt = ">=3.2.0"
 name = "pyyaml"
 version = "6.0"
 description = "YAML parser and emitter for Python"
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -581,7 +547,6 @@ files = [
 name = "reorder-python-imports"
 version = "3.9.0"
 description = "Tool for reordering python imports"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -596,7 +561,6 @@ classify-imports = ">=4.1"
 name = "restructuredtext-lint"
 version = "1.4.0"
 description = "reStructuredText linter"
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -610,7 +574,6 @@ docutils = ">=0.11,<1.0"
 name = "ruamel-yaml"
 version = "0.17.21"
 description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
-category = "dev"
 optional = false
 python-versions = ">=3"
 files = [
@@ -626,7 +589,6 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
 name = "ruff"
 version = "0.0.270"
 description = "An extremely fast Python linter, written in Rust."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -653,7 +615,6 @@ files = [
 name = "setuptools"
 version = "67.6.1"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -670,7 +631,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (
 name = "smmap"
 version = "5.0.0"
 description = "A pure Python implementation of a sliding window memory map manager"
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -682,7 +642,6 @@ files = [
 name = "snowballstemmer"
 version = "2.2.0"
 description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -694,7 +653,6 @@ files = [
 name = "stevedore"
 version = "5.0.0"
 description = "Manage dynamic plugins for Python applications"
-category = "dev"
 optional = false
 python-versions = ">=3.8"
 files = [
@@ -709,7 +667,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0"
 name = "tokenize-rt"
 version = "5.0.0"
 description = "A wrapper around the stdlib `tokenize` which roundtrips."
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -721,7 +678,6 @@ files = [
 name = "tomli"
 version = "2.0.1"
 description = "A lil' TOML parser"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -733,7 +689,6 @@ files = [
 name = "virtualenv"
 version = "20.21.0"
 description = "Virtual Python Environment builder"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -67,9 +67,7 @@ def main() -> None:
     put_serializer_version_onto_numeric_track()
     process_instances = all_potentially_relevant_process_instances()
     potentially_relevant_instance_count = len(process_instances)
-    current_app.logger.debug(
-        f"Found potentially relevant process_instances: {potentially_relevant_instance_count}"
-    )
+    current_app.logger.debug(f"Found potentially relevant process_instances: {potentially_relevant_instance_count}")
     if potentially_relevant_instance_count > 0:
         run_version_1()
         # this will run while using the new per instance on demand data migration framework
@@ -20,9 +20,7 @@ def main():
     db.session.commit()

     """Print process instance count."""
-    process_instances = ProcessInstanceModel.query.filter_by(
-        process_model_identifier=process_model_identifier_ticket
-    ).all()
+    process_instances = ProcessInstanceModel.query.filter_by(process_model_identifier=process_model_identifier_ticket).all()
     process_instance_count = len(process_instances)
     print(f"process_instance_count: {process_instance_count}")

@@ -15,6 +15,10 @@ fi
 port="${SPIFFWORKFLOW_BACKEND_PORT:-7000}"

 process_model_dir="${1:-}"
+
+if [[ -d "$process_model_dir" ]]; then
+  shift
+fi

 if [[ -z "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   if [[ -n "${process_model_dir}" ]] && [[ -d "${process_model_dir}" ]]; then
@@ -7,9 +7,11 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail

+script="$1"
+shift
 script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
 . "${script_dir}/local_development_environment_setup"

 export SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false

-poet run python "$@"
+poet run python "$script"
@@ -66,5 +66,10 @@ fi

 result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}/${process_instance_id}/run" -H "Authorization: Bearer $access_token")
 check_result_for_error "$result"
+if [[ "$(jq -r '.status' <<<"$result")" == "complete" ]]; then
+  echo "Process instance completed"
+  exit 0
+fi
+
 next_task=$(jq '.next_task' <<<"$result")
 process_next_task "$next_task"
@@ -12,9 +12,27 @@ if ! command -v pydeps >/dev/null 2>&1; then
   pip install pydeps
 fi

-more_args=''
+pydeps_args=()

 if [[ "${1:-}" == "r" ]]; then
-  more_args='--rankdir LR'
+  shift
+  pydeps_args+=("--rankdir" "LR")
 fi

-pydeps src/spiffworkflow_backend --display "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome" --only spiffworkflow_backend --rmprefix spiffworkflow_backend. --exclude-exact spiffworkflow_backend.services.custom_parser spiffworkflow_backend.specs spiffworkflow_backend.services spiffworkflow_backend spiffworkflow_backend.models spiffworkflow_backend.load_database_models spiffworkflow_backend.routes --exclude spiffworkflow_backend.config spiffworkflow_backend.interfaces spiffworkflow_backend.models.db $more_args
+# add other args to only
+# example usage:
+# ./bin/run_pydeps spiffworkflow_backend.services.process_instance_processor spiffworkflow_backend.services.process_instance_service spiffworkflow_backend.background_processing spiffworkflow_backend.routes.tasks_controller spiffworkflow_backend.services.workflow_execution_service
+if [[ -n "${1:-}" ]]; then
+  pydeps_args+=("--only")
+  for arg in "$@"; do
+    pydeps_args+=("$arg")
+  done
+fi
+
+if [[ -f "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome" ]]; then
+  pydeps_args+=("--display" "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome")
+fi
+
+# the only at the end is specific to this branch
+pydeps src/spiffworkflow_backend --only spiffworkflow_backend --rmprefix spiffworkflow_backend. --exclude-exact spiffworkflow_backend.background_processing spiffworkflow_backend.services.custom_parser spiffworkflow_backend.specs spiffworkflow_backend.services spiffworkflow_backend spiffworkflow_backend.models spiffworkflow_backend.load_database_models spiffworkflow_backend.routes --exclude spiffworkflow_backend.config spiffworkflow_backend.interfaces spiffworkflow_backend.models.db "${pydeps_args[@]}"
@@ -10,6 +10,11 @@ set -o errtrace -o errexit -o nounset -o pipefail
 script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
 . "${script_dir}/local_development_environment_setup"

+server_type="${1:-api}"
+
+if [[ "$server_type" == "celery_worker" ]]; then
+  "${script_dir}/start_celery_worker"
+else
 if [[ -n "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" ]]; then
   ./bin/boot_server_in_docker
 else
@@ -22,3 +27,4 @@ else
   # this line blocks
   poetry run flask run -p "$port" --host=0.0.0.0
 fi
+fi
@@ -12,10 +12,7 @@ def main() -> None:
     failing_process_models = DataSetupService.save_all_process_models()
     for bpmn_errors in failing_process_models:
         print(bpmn_errors)
-    if (
-        os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") != "false"
-        and len(failing_process_models) > 0
-    ):
+    if os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") != "false" and len(failing_process_models) > 0:
         exit(1)

@@ -3,7 +3,7 @@ import time

 from apscheduler.schedulers.background import BlockingScheduler  # type: ignore
 from spiffworkflow_backend import create_app
-from spiffworkflow_backend import start_scheduler
+from spiffworkflow_backend.background_processing.apscheduler import start_apscheduler
 from spiffworkflow_backend.data_migrations.version_1_3 import VersionOneThree
 from spiffworkflow_backend.helpers.db_helper import try_to_connect

@@ -23,11 +23,8 @@ def main() -> None:
     VersionOneThree().run()

     end_time = time.time()
-    print(
-        f"done running data migration from background processor. took {end_time - start_time} seconds. starting"
-        " scheduler"
-    )
-    start_scheduler(app, BlockingScheduler)
+    print(f"done running data migration from background processor. took {end_time - start_time} seconds. starting scheduler")
+    start_apscheduler(app, BlockingScheduler)


 if __name__ == "__main__":
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+export SPIFFWORKFLOW_BACKEND_CELERY_ENABLED=true
+export SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false
+
+poetry run celery -A src.spiffworkflow_backend.background_processing.celery_worker worker --loglevel=info -c 12
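The worker script above points Celery at src.spiffworkflow_backend.background_processing.celery_worker. A minimal sketch of what such a module typically contains when tasks need the Flask app context follows; the create_app factory, broker default, and task name are assumptions, not the project's actual wiring.

    from celery import Celery

    from spiffworkflow_backend import create_app  # assumed app factory

    flask_app = create_app()

    celery_app = Celery(
        flask_app.import_name,
        broker=flask_app.config.get("CELERY_BROKER_URL", "redis://localhost:6379/0"),
    )

    @celery_app.task(name="spiffworkflow_backend.celery_tasks.process_instance_task")  # assumed name
    def process_instance_task(task_guid: str) -> None:
        # Run inside the Flask app context so the db session and config are available.
        with flask_app.app_context():
            flask_app.logger.info(f"processing task {task_guid}")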
@@ -0,0 +1,62 @@
+"""empty message
+
+Revision ID: 441dca328887
+Revises: 1b5a9f7af28e
+Create Date: 2023-12-05 10:36:32.487659
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '441dca328887'
+down_revision = '1b5a9f7af28e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('future_task',
+    sa.Column('guid', sa.String(length=36), nullable=False),
+    sa.Column('run_at_in_seconds', sa.Integer(), nullable=False),
+    sa.Column('completed', sa.Boolean(), nullable=False),
+    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=False),
+    sa.PrimaryKeyConstraint('guid')
+    )
+    with op.batch_alter_table('future_task', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_future_task_completed'), ['completed'], unique=False)
+        batch_op.create_index(batch_op.f('ix_future_task_run_at_in_seconds'), ['run_at_in_seconds'], unique=False)
+
+    op.create_table('task_instructions_for_end_user',
+    sa.Column('task_guid', sa.String(length=36), nullable=False),
+    sa.Column('instruction', sa.Text(), nullable=False),
+    sa.Column('process_instance_id', sa.Integer(), nullable=False),
+    sa.Column('has_been_retrieved', sa.Boolean(), nullable=False),
+    sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
+    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
+    sa.PrimaryKeyConstraint('task_guid')
+    )
+    with op.batch_alter_table('task_instructions_for_end_user', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_task_instructions_for_end_user_has_been_retrieved'), ['has_been_retrieved'], unique=False)
+        batch_op.create_index(batch_op.f('ix_task_instructions_for_end_user_process_instance_id'), ['process_instance_id'], unique=False)
+        batch_op.create_index(batch_op.f('ix_task_instructions_for_end_user_timestamp'), ['timestamp'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('task_instructions_for_end_user', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_task_instructions_for_end_user_timestamp'))
+        batch_op.drop_index(batch_op.f('ix_task_instructions_for_end_user_process_instance_id'))
+        batch_op.drop_index(batch_op.f('ix_task_instructions_for_end_user_has_been_retrieved'))

+    op.drop_table('task_instructions_for_end_user')
+    with op.batch_alter_table('future_task', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_future_task_run_at_in_seconds'))
+        batch_op.drop_index(batch_op.f('ix_future_task_completed'))
+
+    op.drop_table('future_task')
+    # ### end Alembic commands ###
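For orientation, the future_task table created by this migration maps onto a SQLAlchemy model roughly like the sketch below. The column names and types mirror the migration; the class name and the shared db object are assumptions about the surrounding codebase.

    from flask_sqlalchemy import SQLAlchemy

    db = SQLAlchemy()  # stand-in; the application shares a single db instance

    class FutureTaskModel(db.Model):
        # Rows represent tasks that should run at a specific time in the future.
        __tablename__ = "future_task"

        guid = db.Column(db.String(36), primary_key=True)
        run_at_in_seconds = db.Column(db.Integer, nullable=False, index=True)
        completed = db.Column(db.Boolean, nullable=False, index=True)
        updated_at_in_seconds = db.Column(db.Integer, nullable=False)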
@ -19,6 +19,20 @@ typing-extensions = ">=4"
|
||||||
[package.extras]
|
[package.extras]
|
||||||
tz = ["python-dateutil"]
|
tz = ["python-dateutil"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "amqp"
|
||||||
|
version = "5.2.0"
|
||||||
|
description = "Low-level AMQP client for Python (fork of amqplib)."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"},
|
||||||
|
{file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
vine = ">=5.0.0,<6.0.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aniso8601"
|
name = "aniso8601"
|
||||||
version = "9.0.1"
|
version = "9.0.1"
|
||||||
|
@ -62,6 +76,17 @@ tornado = ["tornado (>=4.3)"]
|
||||||
twisted = ["twisted"]
|
twisted = ["twisted"]
|
||||||
zookeeper = ["kazoo"]
|
zookeeper = ["kazoo"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "async-timeout"
|
||||||
|
version = "4.0.3"
|
||||||
|
description = "Timeout context manager for asyncio programs"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
|
||||||
|
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "attrs"
|
name = "attrs"
|
||||||
version = "22.2.0"
|
version = "22.2.0"
|
||||||
|
@ -137,32 +162,51 @@ tests = ["pytest (>=3.2.1,!=3.3.0)"]
|
||||||
typecheck = ["mypy"]
|
typecheck = ["mypy"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "black"
|
name = "billiard"
|
||||||
version = "22.12.0"
|
version = "4.2.0"
|
||||||
description = "The uncompromising code formatter."
|
description = "Python multiprocessing fork with improvements and bugfixes"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
|
{file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"},
|
||||||
{file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
|
{file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"},
|
||||||
{file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
|
]
|
||||||
{file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
|
|
||||||
{file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
|
[[package]]
|
||||||
{file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
|
name = "black"
|
||||||
{file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
|
version = "23.11.0"
|
||||||
{file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
|
description = "The uncompromising code formatter."
|
||||||
{file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
|
optional = false
|
||||||
{file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
|
python-versions = ">=3.8"
|
||||||
{file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
|
files = [
|
||||||
{file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
|
{file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"},
|
||||||
|
{file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"},
|
||||||
|
{file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"},
|
||||||
|
{file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"},
|
||||||
|
{file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"},
|
||||||
|
{file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"},
|
||||||
|
{file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"},
|
||||||
|
{file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"},
|
||||||
|
{file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"},
|
||||||
|
{file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"},
|
||||||
|
{file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"},
|
||||||
|
{file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"},
|
||||||
|
{file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"},
|
||||||
|
{file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"},
|
||||||
|
{file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"},
|
||||||
|
{file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"},
|
||||||
|
{file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"},
|
||||||
|
{file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
click = ">=8.0.0"
|
click = ">=8.0.0"
|
||||||
mypy-extensions = ">=0.4.3"
|
mypy-extensions = ">=0.4.3"
|
||||||
|
packaging = ">=22.0"
|
||||||
pathspec = ">=0.9.0"
|
pathspec = ">=0.9.0"
|
||||||
platformdirs = ">=2"
|
platformdirs = ">=2"
|
||||||
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
|
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
|
||||||
|
typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
colorama = ["colorama (>=0.4.3)"]
|
colorama = ["colorama (>=0.4.3)"]
|
||||||
|
@ -192,6 +236,77 @@ files = [
|
||||||
{file = "cachelib-0.10.2.tar.gz", hash = "sha256:593faeee62a7c037d50fc835617a01b887503f972fb52b188ae7e50e9cb69740"},
|
{file = "cachelib-0.10.2.tar.gz", hash = "sha256:593faeee62a7c037d50fc835617a01b887503f972fb52b188ae7e50e9cb69740"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "celery"
|
||||||
|
version = "5.3.5"
|
||||||
|
description = "Distributed Task Queue."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "celery-5.3.5-py3-none-any.whl", hash = "sha256:30b75ac60fb081c2d9f8881382c148ed7c9052031a75a1e8743ff4b4b071f184"},
|
||||||
|
{file = "celery-5.3.5.tar.gz", hash = "sha256:6b65d8dd5db499dd6190c45aa6398e171b99592f2af62c312f7391587feb5458"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
billiard = ">=4.2.0,<5.0"
|
||||||
|
click = ">=8.1.2,<9.0"
|
||||||
|
click-didyoumean = ">=0.3.0"
|
||||||
|
click-plugins = ">=1.1.1"
|
||||||
|
click-repl = ">=0.2.0"
|
||||||
|
kombu = ">=5.3.3,<6.0"
|
||||||
|
python-dateutil = ">=2.8.2"
|
||||||
|
redis = {version = ">=4.5.2,<4.5.5 || >4.5.5,<6.0.0", optional = true, markers = "extra == \"redis\""}
|
||||||
|
tzdata = ">=2022.7"
|
||||||
|
vine = ">=5.1.0,<6.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
arangodb = ["pyArango (>=2.0.2)"]
|
||||||
|
auth = ["cryptography (==41.0.5)"]
|
||||||
|
azureblockblob = ["azure-storage-blob (>=12.15.0)"]
|
||||||
|
brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"]
|
||||||
|
cassandra = ["cassandra-driver (>=3.25.0,<4)"]
|
||||||
|
consul = ["python-consul2 (==0.1.5)"]
|
||||||
|
cosmosdbsql = ["pydocumentdb (==2.3.5)"]
|
||||||
|
couchbase = ["couchbase (>=3.0.0)"]
|
||||||
|
couchdb = ["pycouchdb (==1.14.2)"]
|
||||||
|
django = ["Django (>=2.2.28)"]
|
||||||
|
dynamodb = ["boto3 (>=1.26.143)"]
|
||||||
|
elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.10.1)"]
|
||||||
|
eventlet = ["eventlet (>=0.32.0)"]
|
||||||
|
gevent = ["gevent (>=1.5.0)"]
|
||||||
|
librabbitmq = ["librabbitmq (>=2.0.0)"]
|
||||||
|
memcache = ["pylibmc (==1.6.3)"]
|
||||||
|
mongodb = ["pymongo[srv] (>=4.0.2)"]
|
||||||
|
msgpack = ["msgpack (==1.0.7)"]
|
||||||
|
pymemcache = ["python-memcached (==1.59)"]
|
||||||
|
pyro = ["pyro4 (==4.82)"]
|
||||||
|
pytest = ["pytest-celery (==0.0.0)"]
|
||||||
|
redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"]
|
||||||
|
s3 = ["boto3 (>=1.26.143)"]
|
||||||
|
slmq = ["softlayer-messaging (>=1.0.3)"]
|
||||||
|
solar = ["ephem (==4.1.5)"]
|
||||||
|
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
|
||||||
|
sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
|
||||||
|
tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"]
|
||||||
|
yaml = ["PyYAML (>=3.10)"]
|
||||||
|
zookeeper = ["kazoo (>=1.3.1)"]
|
||||||
|
zstd = ["zstandard (==0.22.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "celery-stubs"
|
||||||
|
version = "0.1.3"
|
||||||
|
description = "celery stubs"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
files = [
|
||||||
|
{file = "celery-stubs-0.1.3.tar.gz", hash = "sha256:0fb5345820f8a2bd14e6ffcbef2d10181e12e40f8369f551d7acc99d8d514919"},
|
||||||
|
{file = "celery_stubs-0.1.3-py3-none-any.whl", hash = "sha256:dfb9ad27614a8af028b2055bb4a4ae99ca5e9a8d871428a506646d62153218d7"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
mypy = ">=0.950"
|
||||||
|
typing-extensions = ">=4.2.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "certifi"
|
name = "certifi"
|
||||||
version = "2023.7.22"
|
version = "2023.7.22"
|
||||||
|
@ -388,6 +503,55 @@ files = [
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "click-didyoumean"
|
||||||
|
version = "0.3.0"
|
||||||
|
description = "Enables git-like *did-you-mean* feature in click"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6.2,<4.0.0"
|
||||||
|
files = [
|
||||||
|
{file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"},
|
||||||
|
{file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
click = ">=7"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "click-plugins"
|
||||||
|
version = "1.1.1"
|
||||||
|
description = "An extension module for click to enable registering CLI commands via setuptools entry-points."
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
files = [
|
||||||
|
{file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"},
|
||||||
|
{file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
click = ">=4.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "click-repl"
|
||||||
|
version = "0.3.0"
|
||||||
|
description = "REPL plugin for Click"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"},
|
||||||
|
{file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
click = ">=7.0"
|
||||||
|
prompt-toolkit = ">=3.0.36"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clickclick"
|
name = "clickclick"
|
||||||
version = "20.10.2"
|
version = "20.10.2"
|
||||||
|
@ -1094,6 +1258,38 @@ pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
|
||||||
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
|
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
|
||||||
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
|
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "kombu"
|
||||||
|
version = "5.3.4"
|
||||||
|
description = "Messaging library for Python."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "kombu-5.3.4-py3-none-any.whl", hash = "sha256:63bb093fc9bb80cfb3a0972336a5cec1fa7ac5f9ef7e8237c6bf8dda9469313e"},
|
||||||
|
{file = "kombu-5.3.4.tar.gz", hash = "sha256:0bb2e278644d11dea6272c17974a3dbb9688a949f3bb60aeb5b791329c44fadc"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
amqp = ">=5.1.1,<6.0.0"
|
||||||
|
vine = "*"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
azureservicebus = ["azure-servicebus (>=7.10.0)"]
|
||||||
|
azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"]
|
||||||
|
confluentkafka = ["confluent-kafka (>=2.2.0)"]
|
||||||
|
consul = ["python-consul2"]
|
||||||
|
librabbitmq = ["librabbitmq (>=2.0.0)"]
|
||||||
|
mongodb = ["pymongo (>=4.1.1)"]
|
||||||
|
msgpack = ["msgpack"]
|
||||||
|
pyro = ["pyro4"]
|
||||||
|
qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"]
|
||||||
|
redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"]
|
||||||
|
slmq = ["softlayer-messaging (>=1.0.3)"]
|
||||||
|
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
|
||||||
|
sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
|
||||||
|
yaml = ["PyYAML (>=3.10)"]
|
||||||
|
zookeeper = ["kazoo (>=2.8.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lxml"
|
name = "lxml"
|
||||||
version = "4.9.2"
|
version = "4.9.2"
|
||||||
|
@ -1426,18 +1622,15 @@ test = ["blinker", "cryptography", "mock", "nose", "pyjwt (>=1.0.0)", "unittest2
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "packaging"
|
name = "packaging"
|
||||||
version = "21.3"
|
version = "23.2"
|
||||||
description = "Core utilities for Python packages"
|
description = "Core utilities for Python packages"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.6"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
|
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
|
||||||
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
|
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pathspec"
|
name = "pathspec"
|
||||||
version = "0.11.1"
|
version = "0.11.1"
|
||||||
|
@@ -1552,6 +1745,20 @@ files = [
 flask = "*"
 prometheus-client = "*"

+[[package]]
+name = "prompt-toolkit"
+version = "3.0.41"
+description = "Library for building powerful interactive command lines in Python"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+    {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"},
+    {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"},
+]
+
+[package.dependencies]
+wcwidth = "*"
+
 [[package]]
 name = "psycopg2"
 version = "2.9.6"
@@ -1616,20 +1823,6 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte
 docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
 tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

-[[package]]
-name = "pyparsing"
-version = "3.0.9"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-optional = false
-python-versions = ">=3.6.8"
-files = [
-    {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
-    {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
-]
-
-[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
-
 [[package]]
 name = "pyrsistent"
 version = "0.19.3"
@@ -1867,6 +2060,24 @@ files = [
     {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
 ]

+[[package]]
+name = "redis"
+version = "5.0.1"
+description = "Python client for Redis database and key-value store"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"},
+    {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"},
+]
+
+[package.dependencies]
+async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""}
+
+[package.extras]
+hiredis = ["hiredis (>=1.0.0)"]
+ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
+
 [[package]]
 name = "regex"
 version = "2023.3.23"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "safety"
|
name = "safety"
|
||||||
version = "2.3.5"
|
version = "2.4.0b2"
|
||||||
description = "Checks installed dependencies for known vulnerabilities and licenses."
|
description = "Checks installed dependencies for known vulnerabilities and licenses."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
files = [
|
files = [
|
||||||
{file = "safety-2.3.5-py3-none-any.whl", hash = "sha256:2227fcac1b22b53c1615af78872b48348661691450aa25d6704a5504dbd1f7e2"},
|
{file = "safety-2.4.0b2-py3-none-any.whl", hash = "sha256:63773ce92e17f5f80e7dff4c8a25d8abb7d62d375897b5f3bb4afe9313b100ff"},
|
||||||
{file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"},
|
{file = "safety-2.4.0b2.tar.gz", hash = "sha256:9907010c6ca7720861ca7fa1496bdb80449b0619ca136eb7ac7e02bd3516cd4f"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
Click = ">=8.0.2"
|
Click = ">=8.0.2"
|
||||||
dparse = ">=0.6.2"
|
dparse = ">=0.6.2"
|
||||||
packaging = ">=21.0,<22.0"
|
jinja2 = {version = ">=3.1.0", markers = "python_version >= \"3.7\""}
|
||||||
|
marshmallow = {version = ">=3.15.0", markers = "python_version >= \"3.7\""}
|
||||||
|
packaging = ">=21.0"
|
||||||
requests = "*"
|
requests = "*"
|
||||||
"ruamel.yaml" = ">=0.17.21"
|
"ruamel.yaml" = ">=0.17.21"
|
||||||
setuptools = ">=19.3"
|
setuptools = {version = ">=65.5.1", markers = "python_version >= \"3.7\""}
|
||||||
|
urllib3 = ">=1.26.5"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"]
|
github = ["pygithub (>=1.43.3)"]
|
||||||
gitlab = ["python-gitlab (>=1.3.0)"]
|
gitlab = ["python-gitlab (>=1.3.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@@ -2676,6 +2890,17 @@ secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]

+[[package]]
+name = "vine"
+version = "5.1.0"
+description = "Python promises."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"},
+    {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"},
+]
+
 [[package]]
 name = "virtualenv"
 version = "20.21.0"
@@ -2696,6 +2921,17 @@ platformdirs = ">=2.4,<4"
 docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
 test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]

+[[package]]
+name = "wcwidth"
+version = "0.2.10"
+description = "Measures the displayed width of unicode strings in a terminal"
+optional = false
+python-versions = "*"
+files = [
+    {file = "wcwidth-0.2.10-py2.py3-none-any.whl", hash = "sha256:aec5179002dd0f0d40c456026e74a729661c9d468e1ed64405e3a6c2176ca36f"},
+    {file = "wcwidth-0.2.10.tar.gz", hash = "sha256:390c7454101092a6a5e43baad8f83de615463af459201709556b6e4b1c861f97"},
+]
+
 [[package]]
 name = "werkzeug"
 version = "2.3.8"
@@ -2732,34 +2968,32 @@ email = ["email-validator"]

 [[package]]
 name = "xdoctest"
-version = "1.1.1"
+version = "1.1.2"
 description = "A rewrite of the builtin doctest module"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "xdoctest-1.1.1-py3-none-any.whl", hash = "sha256:d59d4ed91cb92e4430ef0ad1b134a2bef02adff7d2fb9c9f057547bee44081a2"},
-    {file = "xdoctest-1.1.1.tar.gz", hash = "sha256:2eac8131bdcdf2781b4e5a62d6de87f044b730cc8db8af142a51bb29c245e779"},
+    {file = "xdoctest-1.1.2-py3-none-any.whl", hash = "sha256:ebe133222534f09597cbe461f97cc5f95ad7b36e5d31f3437caffb9baaddbddb"},
+    {file = "xdoctest-1.1.2.tar.gz", hash = "sha256:267d3d4e362547fa917d3deabaf6888232bbf43c8d30298faeb957dbfa7e0ba3"},
 ]

 [package.dependencies]
 colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""}
 Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""}
-six = "*"

 [package.extras]
-all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "pytest", "pytest", "pytest", "pytest-cov", "six", "tomli", "typing"]
+all = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "tomli (>=0.2.0)", "typing (>=3.7.4)"]
-all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "six (==1.11.0)", "tomli (==0.2.0)", "typing (==3.7.4)"]
+all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "tomli (==0.2.0)", "typing (==3.7.4)"]
 colors = ["Pygments", "Pygments", "colorama"]
-jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"]
+jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "nbconvert"]
-optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"]
+optional = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "tomli (>=0.2.0)"]
-optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
+optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
-runtime-strict = ["six (==1.11.0)"]
-tests = ["codecov", "pytest", "pytest", "pytest", "pytest-cov", "typing"]
+tests = ["pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "typing (>=3.7.4)"]
 tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"]
 tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"]
-tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"]
+tests-strict = ["pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"]

 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<3.12"
-content-hash = "c30e0e07342a1e7b34bed4ae9722c0604f561a83e220b1b423049dde6c61c122"
+content-hash = "470406c5ff0f63983a4fffb90c0a9101c1abcf07fb4ea2a9414b8cdd16aa2f60"
@@ -77,6 +77,8 @@ spiff-element-units = "^0.3.1"
 mysqlclient = "^2.2.0"
 flask-session = "^0.5.0"
 flask-oauthlib = "^0.9.6"
+celery = {extras = ["redis"], version = "^5.3.5"}
+celery-stubs = "^0.1.3"

 [tool.poetry.dev-dependencies]
 pytest = "^7.1.2"
@@ -1,22 +1,18 @@
 import faulthandler
-import json
 import os
-import sys
 from typing import Any

 import connexion # type: ignore
 import flask.app
 import flask.json
 import sqlalchemy
-from apscheduler.schedulers.background import BackgroundScheduler # type: ignore
-from apscheduler.schedulers.base import BaseScheduler # type: ignore
 from flask.json.provider import DefaultJSONProvider
 from flask_cors import CORS # type: ignore
 from flask_mail import Mail # type: ignore
-from prometheus_flask_exporter import ConnexionPrometheusMetrics # type: ignore
-from werkzeug.exceptions import NotFound

 import spiffworkflow_backend.load_database_models # noqa: F401
+from spiffworkflow_backend.background_processing.apscheduler import start_apscheduler_if_appropriate
+from spiffworkflow_backend.background_processing.celery import init_celery_if_appropriate
 from spiffworkflow_backend.config import setup_config
 from spiffworkflow_backend.exceptions.api_error import api_error_blueprint
 from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
@@ -27,7 +23,8 @@ from spiffworkflow_backend.routes.authentication_controller import verify_token
 from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import openid_blueprint
 from spiffworkflow_backend.routes.user_blueprint import user_blueprint
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
-from spiffworkflow_backend.services.background_processing_service import BackgroundProcessingService
+from spiffworkflow_backend.services.monitoring_service import configure_sentry
+from spiffworkflow_backend.services.monitoring_service import setup_prometheus_metrics


 class MyJSONEncoder(DefaultJSONProvider):
@@ -55,60 +52,6 @@ class MyJSONEncoder(DefaultJSONProvider):
         return super().dumps(obj, **kwargs)


-def start_scheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler) -> None:
-    scheduler = scheduler_class()
-
-    # TODO: polling intervals for messages job
-    polling_interval_in_seconds = app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"]
-    not_started_polling_interval_in_seconds = app.config[
-        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_NOT_STARTED_POLLING_INTERVAL_IN_SECONDS"
-    ]
-    user_input_required_polling_interval_in_seconds = app.config[
-        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS"
-    ]
-    # TODO: add job to release locks to simplify other queries
-    # TODO: add job to delete completed entires
-    # TODO: add job to run old/low priority instances so they do not get drowned out
-
-    scheduler.add_job(
-        BackgroundProcessingService(app).process_message_instances_with_app_context,
-        "interval",
-        seconds=10,
-    )
-    scheduler.add_job(
-        BackgroundProcessingService(app).process_not_started_process_instances,
-        "interval",
-        seconds=not_started_polling_interval_in_seconds,
-    )
-    scheduler.add_job(
-        BackgroundProcessingService(app).process_waiting_process_instances,
-        "interval",
-        seconds=polling_interval_in_seconds,
-    )
-    scheduler.add_job(
-        BackgroundProcessingService(app).process_user_input_required_process_instances,
-        "interval",
-        seconds=user_input_required_polling_interval_in_seconds,
-    )
-    scheduler.add_job(
-        BackgroundProcessingService(app).remove_stale_locks,
-        "interval",
-        seconds=app.config["MAX_INSTANCE_LOCK_DURATION_IN_SECONDS"],
-    )
-    scheduler.start()
-
-
-def should_start_scheduler(app: flask.app.Flask) -> bool:
-    if not app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP"]:
-        return False
-
-    # do not start the scheduler twice in flask debug mode but support code reloading
-    if app.config["ENV_IDENTIFIER"] == "local_development" and os.environ.get("WERKZEUG_RUN_MAIN") == "true":
-        return False
-
-    return True
-
-
 def create_app() -> flask.app.Flask:
     faulthandler.enable()

@@ -121,7 +64,7 @@ def create_app() -> flask.app.Flask:
     app = connexion_app.app
     app.config["CONNEXION_APP"] = connexion_app
     app.config["SESSION_TYPE"] = "filesystem"
-    _setup_prometheus_metrics(app, connexion_app)
+    setup_prometheus_metrics(app, connexion_app)

     setup_config(app)
     db.init_app(app)
@@ -134,9 +77,7 @@ def create_app() -> flask.app.Flask:
     # preflight options requests will be allowed if they meet the requirements of the url regex.
     # we will add an Access-Control-Max-Age header to the response to tell the browser it doesn't
     # need to continually keep asking for the same path.
-    origins_re = [
-        r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"]
-    ]
+    origins_re = [r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"]]
     CORS(app, origins=origins_re, max_age=3600, supports_credentials=True)

     connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX)
@@ -146,9 +87,6 @@ def create_app() -> flask.app.Flask:

     app.json = MyJSONEncoder(app)

-    if should_start_scheduler(app):
-        start_scheduler(app)
-
     configure_sentry(app)

     app.before_request(verify_token)
@@ -159,104 +97,7 @@ def create_app() -> flask.app.Flask:
     # This is particularly helpful for forms that are generated from json schemas.
     app.json.sort_keys = False

+    start_apscheduler_if_appropriate(app)
+    init_celery_if_appropriate(app)
+
     return app # type: ignore
-
-
-def get_version_info_data() -> dict[str, Any]:
-    version_info_data_dict = {}
-    if os.path.isfile("version_info.json"):
-        with open("version_info.json") as f:
-            version_info_data_dict = json.load(f)
-    return version_info_data_dict
-
-
-def _setup_prometheus_metrics(app: flask.app.Flask, connexion_app: connexion.apps.flask_app.FlaskApp) -> None:
-    metrics = ConnexionPrometheusMetrics(connexion_app)
-    app.config["PROMETHEUS_METRICS"] = metrics
-    version_info_data = get_version_info_data()
-    if len(version_info_data) > 0:
-        # prometheus does not allow periods in key names
-        version_info_data_normalized = {k.replace(".", "_"): v for k, v in version_info_data.items()}
-        metrics.info("version_info", "Application Version Info", **version_info_data_normalized)
-
-
-def traces_sampler(sampling_context: Any) -> Any:
-    # always inherit
-    if sampling_context["parent_sampled"] is not None:
-        return sampling_context["parent_sampled"]
-
-    if "wsgi_environ" in sampling_context:
-        wsgi_environ = sampling_context["wsgi_environ"]
-        path_info = wsgi_environ.get("PATH_INFO")
-        request_method = wsgi_environ.get("REQUEST_METHOD")
-
-        # tasks_controller.task_submit
-        # this is the current pain point as of 31 jan 2023.
-        if path_info and (
-            (path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
-            or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
-        ):
-            return 1
-
-    # Default sample rate for all others (replaces traces_sample_rate)
-    return 0.01
-
-
-def configure_sentry(app: flask.app.Flask) -> None:
-    import sentry_sdk
-    from sentry_sdk.integrations.flask import FlaskIntegration
-
-    # get rid of NotFound errors
-    def before_send(event: Any, hint: Any) -> Any:
-        if "exc_info" in hint:
-            _exc_type, exc_value, _tb = hint["exc_info"]
-            # NotFound is mostly from web crawlers
-            if isinstance(exc_value, NotFound):
-                return None
-        return event
-
-    sentry_errors_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE")
-    if sentry_errors_sample_rate is None:
-        raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow")
-
-    sentry_traces_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE")
-    if sentry_traces_sample_rate is None:
-        raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow")
-
-    sentry_env_identifier = app.config["ENV_IDENTIFIER"]
-    if app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER"):
-        sentry_env_identifier = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER")
-
-    sentry_configs = {
-        "dsn": app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"),
-        "integrations": [
-            FlaskIntegration(),
-        ],
-        "environment": sentry_env_identifier,
-        # sample_rate is the errors sample rate. we usually set it to 1 (100%)
-        # so we get all errors in sentry.
-        "sample_rate": float(sentry_errors_sample_rate),
-        # Set traces_sample_rate to capture a certain percentage
-        # of transactions for performance monitoring.
-        # We recommend adjusting this value to less than 1(00%) in production.
-        "traces_sample_rate": float(sentry_traces_sample_rate),
-        "traces_sampler": traces_sampler,
-        # The profiles_sample_rate setting is relative to the traces_sample_rate setting.
-        "before_send": before_send,
-    }
-
-    # https://docs.sentry.io/platforms/python/configuration/releases
-    version_info_data = get_version_info_data()
-    if len(version_info_data) > 0:
-        git_commit = version_info_data.get("org.opencontainers.image.revision") or version_info_data.get("git_commit")
-        if git_commit is not None:
-            sentry_configs["release"] = git_commit
-
-    if app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED"):
-        # profiling doesn't work on windows, because of an issue like https://github.com/nvdv/vprof/issues/62
-        # but also we commented out profiling because it was causing segfaults (i guess it is marked experimental)
-        profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
-        if profiles_sample_rate > 0:
-            sentry_configs["_experiments"] = {"profiles_sample_rate": profiles_sample_rate}
-
-    sentry_sdk.init(**sentry_configs)
@@ -1200,10 +1200,10 @@ paths:
        description: The unique id of an existing process instance.
        schema:
          type: integer
-      - name: do_engine_steps
+      - name: force_run
        in: query
        required: false
-        description: Defaults to true, can be set to false if you are just looking at the workflow not completeing it.
+        description: Force the process instance to run even if it has already been started.
        schema:
          type: boolean
    post:
@@ -1728,6 +1728,29 @@ paths:
              items:
                $ref: "#/components/schemas/Task"

+  /tasks/progress/{process_instance_id}:
+    parameters:
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+    get:
+      tags:
+        - Process Instances
+      operationId: spiffworkflow_backend.routes.tasks_controller.process_instance_progress
+      summary: returns the list of instructions that have been queued for a process instance.
+      responses:
+        "200":
+          description: list of task instructions
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/TaskInstructionsForEndUser"
+
  /users/search:
    parameters:
      - name: username_prefix
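A minimal client-side sketch of polling the new progress endpoint. The base URL, process instance id, and bearer-token header are illustrative assumptions, not part of this commit:

import requests

backend_url = "http://localhost:7000/v1.0"  # assumed local backend URL
process_instance_id = 42  # assumed existing process instance

# GET /tasks/progress/{process_instance_id} should return a list of TaskInstructionsForEndUser objects
response = requests.get(
    f"{backend_url}/tasks/progress/{process_instance_id}",
    headers={"Authorization": "Bearer <access_token>"},  # assumed auth scheme
    timeout=10,
)
for instruction in response.json():
    print(instruction["timestamp"], instruction["instruction"])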
@@ -2191,6 +2214,27 @@ paths:
                $ref: "#/components/schemas/Task"


+  /tasks/{process_instance_id}/instruction:
+    parameters:
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+    get:
+      tags:
+        - Tasks
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_with_instruction
+      summary: Gets the next task and its instructions
+      responses:
+        "200":
+          description: One task
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Task"
+
  /tasks/{process_instance_id}/{task_guid}:
    parameters:
      - name: task_guid
@@ -3040,6 +3084,16 @@ components:
      documentation: "# Heading 1\n\nMarkdown documentation text goes here"
      type: form
      state: ready
+    TaskInstructionsForEndUser:
+      properties:
+        task_guid:
+          type: string
+        process_instance_id:
+          type: integer
+        instruction:
+          type: string
+        timestamp:
+          type: number
    TaskAllowsGuest:
      properties:
        allows_guest:
@@ -0,0 +1,3 @@
+CELERY_TASK_PROCESS_INSTANCE_RUN = (
+    "spiffworkflow_backend.background_processing.celery_tasks.process_instance_task.celery_task_process_instance_run"
+)
@@ -0,0 +1,103 @@
+import os
+
+import flask.wrappers
+from apscheduler.schedulers.background import BackgroundScheduler # type: ignore
+from apscheduler.schedulers.base import BaseScheduler # type: ignore
+
+from spiffworkflow_backend.background_processing.background_processing_service import BackgroundProcessingService
+
+
+def should_start_apscheduler(app: flask.app.Flask) -> bool:
+    if not app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP"]:
+        return False
+
+    # do not start the scheduler twice in flask debug mode but support code reloading
+    if app.config["ENV_IDENTIFIER"] == "local_development" and os.environ.get("WERKZEUG_RUN_MAIN") == "true":
+        return False
+
+    return True
+
+
+def start_apscheduler_if_appropriate(app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler) -> None:
+    if not should_start_apscheduler(app):
+        return None
+
+    start_apscheduler(app, scheduler_class)
+
+
+def start_apscheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler) -> None:
+    scheduler = scheduler_class()
+
+    if app.config["SPIFFWORKFLOW_BACKEND_CELERY_ENABLED"]:
+        _add_jobs_for_celery_based_configuration(app, scheduler)
+    else:
+        _add_jobs_for_non_celery_based_configuration(app, scheduler)
+
+    _add_jobs_relevant_for_all_celery_configurations(app, scheduler)
+
+    scheduler.start()
+
+
+def _add_jobs_for_celery_based_configuration(app: flask.app.Flask, scheduler: BaseScheduler) -> None:
+    future_task_execution_interval_in_seconds = app.config[
+        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_FUTURE_TASK_EXECUTION_INTERVAL_IN_SECONDS"
+    ]
+
+    scheduler.add_job(
+        BackgroundProcessingService(app).process_future_tasks,
+        "interval",
+        seconds=future_task_execution_interval_in_seconds,
+    )
+
+
+def _add_jobs_for_non_celery_based_configuration(app: flask.app.Flask, scheduler: BaseScheduler) -> None:
+    # TODO: polling intervals for messages job
+    polling_interval_in_seconds = app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"]
+    user_input_required_polling_interval_in_seconds = app.config[
+        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS"
+    ]
+    # TODO: add job to release locks to simplify other queries
+    # TODO: add job to delete completed entires
+    # TODO: add job to run old/low priority instances so they do not get drowned out
+
+    # we should be able to remove these once we switch over to future tasks for non-celery configuration
+    scheduler.add_job(
+        BackgroundProcessingService(app).process_waiting_process_instances,
+        "interval",
+        seconds=polling_interval_in_seconds,
+    )
+    scheduler.add_job(
+        BackgroundProcessingService(app).process_running_process_instances,
+        "interval",
+        seconds=polling_interval_in_seconds,
+    )
+    scheduler.add_job(
+        BackgroundProcessingService(app).process_user_input_required_process_instances,
+        "interval",
+        seconds=user_input_required_polling_interval_in_seconds,
+    )
+
+
+def _add_jobs_relevant_for_all_celery_configurations(app: flask.app.Flask, scheduler: BaseScheduler) -> None:
+    not_started_polling_interval_in_seconds = app.config[
+        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_NOT_STARTED_POLLING_INTERVAL_IN_SECONDS"
+    ]
+
+    # TODO: see if we can queue with celery instead on celery based configuration
+    scheduler.add_job(
+        BackgroundProcessingService(app).process_message_instances_with_app_context,
+        "interval",
+        seconds=10,
+    )
+
+    # when you create a process instance via the API and do not use the run API method, this would pick up the instance.
+    scheduler.add_job(
+        BackgroundProcessingService(app).process_not_started_process_instances,
+        "interval",
+        seconds=not_started_polling_interval_in_seconds,
+    )
+    scheduler.add_job(
+        BackgroundProcessingService(app).remove_stale_locks,
+        "interval",
+        seconds=app.config["MAX_INSTANCE_LOCK_DURATION_IN_SECONDS"],
+    )
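A rough sketch of how a dedicated scheduler process could reuse start_apscheduler() from the file above with APScheduler's BlockingScheduler, which runs the jobs in the foreground rather than on a background thread. The standalone-script framing is an assumption for illustration:

from apscheduler.schedulers.blocking import BlockingScheduler

from spiffworkflow_backend import create_app
from spiffworkflow_backend.background_processing.apscheduler import start_apscheduler

app = create_app()
# blocks here; the jobs registered above run on their configured intervals
start_apscheduler(app, scheduler_class=BlockingScheduler)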
@@ -1,6 +1,15 @@
-import flask
+import time
+
+import flask
+from sqlalchemy import and_
+
+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_future_task_if_appropriate,
+)
+from spiffworkflow_backend.models.future_task import FutureTaskModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.task import TaskModel # noqa: F401
 from spiffworkflow_backend.services.message_service import MessageService
 from spiffworkflow_backend.services.process_instance_lock_service import ProcessInstanceLockService
 from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService

@@ -24,6 +33,12 @@ class BackgroundProcessingService:
             ProcessInstanceLockService.set_thread_local_locking_context("bg:waiting")
             ProcessInstanceService.do_waiting(ProcessInstanceStatus.waiting.value)

+    def process_running_process_instances(self) -> None:
+        """Since this runs in a scheduler, we need to specify the app context as well."""
+        with self.app.app_context():
+            ProcessInstanceLockService.set_thread_local_locking_context("bg:running")
+            ProcessInstanceService.do_waiting(ProcessInstanceStatus.running.value)
+
     def process_user_input_required_process_instances(self) -> None:
         """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():

@@ -40,3 +55,26 @@ class BackgroundProcessingService:
         """If something has been locked for a certain amount of time it is probably stale so unlock it."""
         with self.app.app_context():
             ProcessInstanceLockService.remove_stale_locks()
+
+    def process_future_tasks(self) -> None:
+        """If something has been locked for a certain amount of time it is probably stale so unlock it."""
+        with self.app.app_context():
+            future_task_lookahead_in_seconds = self.app.config[
+                "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_FUTURE_TASK_LOOKAHEAD_IN_SECONDS"
+            ]
+            lookahead = time.time() + future_task_lookahead_in_seconds
+            future_tasks = FutureTaskModel.query.filter(
+                and_(
+                    FutureTaskModel.completed == False, # noqa: E712
+                    FutureTaskModel.run_at_in_seconds < lookahead,
+                )
+            ).all()
+            for future_task in future_tasks:
+                process_instance = (
+                    ProcessInstanceModel.query.join(TaskModel, TaskModel.process_instance_id == ProcessInstanceModel.id)
+                    .filter(TaskModel.guid == future_task.guid)
+                    .first()
+                )
+                queue_future_task_if_appropriate(
+                    process_instance, eta_in_seconds=future_task.run_at_in_seconds, task_guid=future_task.guid
+                )
@@ -0,0 +1,34 @@
+import flask.wrappers
+from celery import Celery
+from celery import Task
+
+
+def init_celery_if_appropriate(app: flask.app.Flask) -> None:
+    if app.config["SPIFFWORKFLOW_BACKEND_CELERY_ENABLED"]:
+        celery_app = celery_init_app(app)
+        app.celery_app = celery_app
+
+
+def celery_init_app(app: flask.app.Flask) -> Celery:
+    class FlaskTask(Task):
+        def __call__(self, *args: object, **kwargs: object) -> object:
+            with app.app_context():
+                return self.run(*args, **kwargs) # type: ignore
+
+    celery_configs = {
+        "broker_url": app.config["SPIFFWORKFLOW_BACKEND_CELERY_BROKER_URL"],
+        "result_backend": app.config["SPIFFWORKFLOW_BACKEND_CELERY_RESULT_BACKEND"],
+        "task_ignore_result": True,
+        "task_serializer": "json",
+        "result_serializer": "json",
+        "accept_content": ["json"],
+        "enable_utc": True,
+    }
+
+    celery_app = Celery(app.name)
+    celery_app.Task = FlaskTask # type: ignore
+    celery_app.config_from_object(celery_configs)
+    celery_app.conf.update(app.config)
+    celery_app.set_default()
+    app.celery_app = celery_app
+    return celery_app
@@ -0,0 +1,53 @@
+from billiard import current_process # type: ignore
+from celery import shared_task
+from flask import current_app
+
+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_process_instance_if_appropriate,
+)
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.future_task import FutureTaskModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.services.process_instance_lock_service import ProcessInstanceLockService
+from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
+from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
+from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
+from spiffworkflow_backend.services.workflow_execution_service import TaskRunnability
+
+ten_minutes = 60 * 10
+
+
+@shared_task(ignore_result=False, time_limit=ten_minutes)
+def celery_task_process_instance_run(process_instance_id: int, task_guid: str | None = None) -> None:
+    proc_index = current_process().index
+    ProcessInstanceLockService.set_thread_local_locking_context("celery:worker", additional_processing_identifier=proc_index)
+    process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
+    try:
+        with ProcessInstanceQueueService.dequeued(process_instance, additional_processing_identifier=proc_index):
+            ProcessInstanceService.run_process_instance_with_processor(
+                process_instance, execution_strategy_name="run_current_ready_tasks", additional_processing_identifier=proc_index
+            )
+            processor, task_runnability = ProcessInstanceService.run_process_instance_with_processor(
+                process_instance,
+                execution_strategy_name="queue_instructions_for_end_user",
+                additional_processing_identifier=proc_index,
+            )
+        if task_guid is not None:
+            future_task = FutureTaskModel.query.filter_by(completed=False, guid=task_guid).first()
+            if future_task is not None:
+                future_task.completed = True
+                db.session.add(future_task)
+                db.session.commit()
+        if task_runnability == TaskRunnability.has_ready_tasks:
+            queue_process_instance_if_appropriate(process_instance)
+    except ProcessInstanceIsAlreadyLockedError:
+        pass
+    except Exception as e:
+        db.session.rollback() # in case the above left the database with a bad transaction
+        error_message = (
+            f"Error running process_instance {process_instance.id}" + f"({process_instance.process_model_identifier}). {str(e)}"
+        )
+        current_app.logger.error(error_message)
+        db.session.add(process_instance)
+        db.session.commit()
+        raise e
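For a quick check without a broker or worker, the task above can be run eagerly in-process via Celery's Task.apply(). The process instance id is an assumption, and this sketch assumes the Flask app has been created first (with celery enabled) so the task body has an app and database to work with:

from spiffworkflow_backend import create_app
from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task import (
    celery_task_process_instance_run,
)

app = create_app()  # assumes SPIFFWORKFLOW_BACKEND_CELERY_ENABLED is set so celery gets initialized

# run the task body synchronously in the current process (no broker or worker needed)
eager_result = celery_task_process_instance_run.apply(kwargs={"process_instance_id": 42})
eager_result.get()  # re-raises if the task body raised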
@@ -0,0 +1,38 @@
+import time
+
+import celery
+from flask import current_app
+
+from spiffworkflow_backend.background_processing import CELERY_TASK_PROCESS_INSTANCE_RUN
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+
+
+def queue_enabled_for_process_model(process_instance: ProcessInstanceModel) -> bool:
+    # TODO: check based on the process model itself as well
+    return current_app.config["SPIFFWORKFLOW_BACKEND_CELERY_ENABLED"] is True
+
+
+def queue_future_task_if_appropriate(process_instance: ProcessInstanceModel, eta_in_seconds: float, task_guid: str) -> bool:
+    if queue_enabled_for_process_model(process_instance):
+        buffer = 1
+        countdown = eta_in_seconds - time.time() + buffer
+        args_to_celery = {"process_instance_id": process_instance.id, "task_guid": task_guid}
+        # add buffer to countdown to avoid rounding issues and race conditions with spiff. the situation we want to avoid is where
+        # we think the timer said to run it at 6:34:11, and we initialize the SpiffWorkflow library,
+        # expecting the timer to be ready, but the library considered it ready a little after that time
+        # (maybe due to subsecond stuff, maybe because of clock skew within the cluster of computers running spiff)
+        # celery_task_process_instance_run.apply_async(kwargs=args_to_celery, countdown=countdown + 1) # type: ignore
+
+        celery.current_app.send_task(CELERY_TASK_PROCESS_INSTANCE_RUN, kwargs=args_to_celery, countdown=countdown)
+        return True
+
+    return False
+
+
+# if waiting, check all waiting tasks and see if theyt are timers. if they are timers, it's not runnable.
+def queue_process_instance_if_appropriate(process_instance: ProcessInstanceModel) -> bool:
+    if queue_enabled_for_process_model(process_instance):
+        celery.current_app.send_task(CELERY_TASK_PROCESS_INSTANCE_RUN, (process_instance.id,))
+        return True
+
+    return False
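A hypothetical caller-side sketch of the producer functions above. The process instance object, the five-minute timer expiry, and the task guid are assumptions for illustration:

import time

from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
    queue_future_task_if_appropriate,
    queue_process_instance_if_appropriate,
)


def hand_off_to_celery(process_instance, timer_task_guid=None):
    # run the instance now on a worker if celery is enabled; returns False when celery is disabled
    queued = queue_process_instance_if_appropriate(process_instance)

    # or, for an imminent timer event, schedule the run close to its run-at time
    if timer_task_guid is not None:
        queue_future_task_if_appropriate(
            process_instance, eta_in_seconds=time.time() + 300, task_guid=timer_task_guid
        )
    return queued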
@@ -0,0 +1,7 @@
+from spiffworkflow_backend import create_app
+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task import (
+    celery_task_process_instance_run, # noqa: F401
+)
+
+the_flask_app = create_app()
+setting_variable_to_make_celery_happy_no_idea_how_this_works = the_flask_app.celery_app
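One plausible way to start a worker against the entry-point module above, roughly the programmatic equivalent of invoking the celery CLI with -A pointed at this module; the module path and log level are assumptions:

from spiffworkflow_backend.background_processing.celery_worker import the_flask_app

# roughly equivalent to: celery -A spiffworkflow_backend.background_processing.celery_worker worker --loglevel=INFO
the_flask_app.celery_app.worker_main(argv=["worker", "--loglevel=INFO"])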
@@ -90,9 +90,7 @@ def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None:
     else:
         app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = tenant_specific_fields.split(",")
         if len(app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"]) > 3:
-            raise ConfigurationError(
-                "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields"
-            )
+            raise ConfigurationError("SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields")


 def _check_extension_api_configs(app: Flask) -> None:
@@ -239,6 +237,13 @@ def setup_config(app: Flask) -> None:
         }
     ]

+    if app.config["SPIFFWORKFLOW_BACKEND_CELERY_ENABLED"]:
+        app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"] = "queue_instructions_for_end_user"
+        app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"] = "queue_instructions_for_end_user"
+    else:
+        app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"] = "greedy"
+        app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"] = "run_until_user_message"
+
     thread_local_data = threading.local()
     app.config["THREAD_LOCAL_DATA"] = thread_local_data
     _set_up_tenant_specific_fields_as_list_of_strings(app)
@@ -49,9 +49,16 @@ config_from_env("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP",
 config_from_env("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_ALLOW_OPTIMISTIC_CHECKS", default=True)
 config_from_env("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS", default=10)
 config_from_env("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_NOT_STARTED_POLLING_INTERVAL_IN_SECONDS", default=30)
-config_from_env(
-    "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS", default=120
-)
+config_from_env("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS", default=120)
+
+### background with celery
+config_from_env("SPIFFWORKFLOW_BACKEND_CELERY_ENABLED", default=False)
+config_from_env("SPIFFWORKFLOW_BACKEND_CELERY_BROKER_URL", default="redis://localhost")
+config_from_env("SPIFFWORKFLOW_BACKEND_CELERY_RESULT_BACKEND", default="redis://localhost")
+
+# give a little overlap to ensure we do not miss items although the query will handle it either way
+config_from_env("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_FUTURE_TASK_LOOKAHEAD_IN_SECONDS", default=301)
+config_from_env("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_FUTURE_TASK_EXECUTION_INTERVAL_IN_SECONDS", default=300)

 ### frontend
 config_from_env("SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001")

@@ -147,10 +154,6 @@ config_from_env("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL")
 config_from_env("SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET")
 config_from_env("SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH")

-
-### engine
-config_from_env("SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND", default="greedy")
-config_from_env("SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="run_until_user_message")
-
 ### element units
 # disabling until we fix the "no such directory" error so we do not keep sending cypress errors
 config_from_env("SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR", default="src/instance/element-unit-cache")
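A hypothetical way to exercise the new celery settings locally: export the flags before the app is created so config_from_env picks them up. The exact truthy string accepted for the boolean is an assumption; the redis URLs simply repeat the defaults above:

import os

os.environ["SPIFFWORKFLOW_BACKEND_CELERY_ENABLED"] = "true"  # assumed truthy spelling
os.environ["SPIFFWORKFLOW_BACKEND_CELERY_BROKER_URL"] = "redis://localhost"
os.environ["SPIFFWORKFLOW_BACKEND_CELERY_RESULT_BACKEND"] = "redis://localhost"

from spiffworkflow_backend import create_app  # noqa: E402  import after the environment is set

app = create_app()  # celery gets initialized because the flag above is enabled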
@@ -8,9 +8,7 @@ def normalized_environment(key_values: os._Environ) -> dict:
     results = _parse_environment(key_values)
     if isinstance(results, dict):
         return results
-    raise Exception(
-        f"results from parsing environment variables was not a dict. This is troubling. Results were: {results}"
-    )
+    raise Exception(f"results from parsing environment variables was not a dict. This is troubling. Results were: {results}")


 # source originally from: https://charemza.name/blog/posts/software-engineering/devops/structured-data-in-environment-variables/

@@ -78,9 +76,7 @@ def _parse_environment(key_values: os._Environ | dict) -> list | dict:
     )

     def items_with_first_component(items: Iterable, first_component: str) -> dict:
-        return {
-            get_later_components(key): value for key, value in items if get_first_component(key) == first_component
-        }
+        return {get_later_components(key): value for key, value in items if get_first_component(key) == first_component}

     nested_structured_dict = {
         **without_more_components,

@@ -101,8 +97,6 @@ def _parse_environment(key_values: os._Environ | dict) -> list | dict:
         return all(is_int(key) for key, value in nested_structured_dict.items())

     def list_sorted_by_int_key() -> list:
-        return [
-            value for key, value in sorted(nested_structured_dict.items(), key=lambda key_value: int(key_value[0]))
-        ]
+        return [value for key, value in sorted(nested_structured_dict.items(), key=lambda key_value: int(key_value[0]))]

     return list_sorted_by_int_key() if all_keys_are_ints() else nested_structured_dict
@@ -1,9 +1,7 @@
 """qa2 just here as an example of path based routing for apps."""
 from os import environ

-SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
-    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
-)
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get("SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml")
 SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = "https://qa2.spiffworkflow.org"
 SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = "https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow"
 SPIFFWORKFLOW_BACKEND_URL = "https://qa2.spiffworkflow.org/api"
@@ -57,9 +57,7 @@ class ProcessInstanceMigrator:

     @classmethod
     @benchmark_log_func
-    def run_version(
-        cls, data_migration_version_class: DataMigrationBase, process_instance: ProcessInstanceModel
-    ) -> None:
+    def run_version(cls, data_migration_version_class: DataMigrationBase, process_instance: ProcessInstanceModel) -> None:
         if process_instance.spiff_serializer_version < data_migration_version_class.version():
             data_migration_version_class.run(process_instance)
             process_instance.spiff_serializer_version = data_migration_version_class.version()
@@ -53,9 +53,7 @@ class VersionOneThree:

     def process_task_definition(self, task_definition: TaskDefinitionModel) -> None:
         task_definition.typename = task_definition.typename.replace("_BoundaryEventParent", "BoundaryEventSplit")
-        task_definition.bpmn_identifier = task_definition.bpmn_identifier.replace(
-            "BoundaryEventParent", "BoundaryEventSplit"
-        )
+        task_definition.bpmn_identifier = task_definition.bpmn_identifier.replace("BoundaryEventParent", "BoundaryEventSplit")

         properties_json = copy.copy(task_definition.properties_json)
         properties_json.pop("main_child_task_spec", None)
@@ -65,9 +63,7 @@ class VersionOneThree:
         # mostly for ExclusiveGateways
         if "cond_task_specs" in properties_json and properties_json["cond_task_specs"] is not None:
             for cond_task_spec in properties_json["cond_task_specs"]:
-                cond_task_spec["task_spec"] = cond_task_spec["task_spec"].replace(
-                    "BoundaryEventParent", "BoundaryEventSplit"
-                )
+                cond_task_spec["task_spec"] = cond_task_spec["task_spec"].replace("BoundaryEventParent", "BoundaryEventSplit")
         if "default_task_spec" in properties_json and properties_json["default_task_spec"] is not None:
             properties_json["default_task_spec"] = properties_json["default_task_spec"].replace(
                 "BoundaryEventParent", "BoundaryEventSplit"
@@ -208,9 +204,7 @@ class VersionOneThree:

         something_changed = False
         if "escalation_code" in properties_json["event_definition"]:
-            properties_json["event_definition"]["code"] = properties_json["event_definition"].pop(
-                "escalation_code"
-            )
+            properties_json["event_definition"]["code"] = properties_json["event_definition"].pop("escalation_code")
             something_changed = True
         if "error_code" in properties_json["event_definition"]:
             properties_json["event_definition"]["code"] = properties_json["event_definition"].pop("error_code")
@@ -225,9 +219,7 @@ class VersionOneThree:
         if current_app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres":
             task_models = (
                 db.session.query(TaskModel)
-                .filter(
-                    TaskModel.properties_json.op("->>")("last_state_changed") == None  # type: ignore # noqa: E711
-                )
+                .filter(TaskModel.properties_json.op("->>")("last_state_changed") == None)  # type: ignore # noqa: E711
                 .all()
             )
         else:
@@ -31,9 +31,7 @@ class Version2(DataMigrationBase):

                 task_service.save_objects_to_database(save_process_instance_events=False)
         except Exception as ex:
-            current_app.logger.warning(
-                f"Failed to migrate process_instance '{process_instance.id}'. The error was {str(ex)}"
-            )
+            current_app.logger.warning(f"Failed to migrate process_instance '{process_instance.id}'. The error was {str(ex)}")

     @classmethod
     def update_spiff_task_parents(cls, spiff_task: SpiffTask, task_service: TaskService) -> None:
@@ -38,6 +38,4 @@ class Version3(DataMigrationBase):
                 db.session.add(bpmn_process)

         except Exception as ex:
-            current_app.logger.warning(
-                f"Failed to migrate process_instance '{process_instance.id}'. The error was {str(ex)}"
-            )
+            current_app.logger.warning(f"Failed to migrate process_instance '{process_instance.id}'. The error was {str(ex)}")
@@ -117,9 +117,7 @@ class JSONFileDataStore(BpmnDataStoreSpecification):  # type: ignore
         location = _data_store_location_for_task(my_task, self.bpmn_id)
         if location is None:
             raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
-        contents = FileSystemService.contents_of_json_file_at_relative_path(
-            location, _data_store_filename(self.bpmn_id)
-        )
+        contents = FileSystemService.contents_of_json_file_at_relative_path(location, _data_store_filename(self.bpmn_id))
         my_task.data[self.bpmn_id] = contents

     def set(self, my_task: SpiffTask) -> None:
@@ -38,11 +38,7 @@ class KKVDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
         }

     def _get_model(self, top_level_key: str, secondary_key: str) -> KKVDataStoreModel | None:
-        model = (
-            db.session.query(KKVDataStoreModel)
-            .filter_by(top_level_key=top_level_key, secondary_key=secondary_key)
-            .first()
-        )
+        model = db.session.query(KKVDataStoreModel).filter_by(top_level_key=top_level_key, secondary_key=secondary_key).first()
         return model

     def _delete_all_for_top_level_key(self, top_level_key: str) -> None:
@@ -25,9 +25,7 @@ class TypeaheadDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ig

     @staticmethod
     def query_data_store(name: str) -> Any:
-        return TypeaheadModel.query.filter_by(category=name).order_by(
-            TypeaheadModel.category, TypeaheadModel.search_term
-        )
+        return TypeaheadModel.query.filter_by(category=name).order_by(TypeaheadModel.category, TypeaheadModel.search_term)

     @staticmethod
     def build_response_item(model: Any) -> dict[str, Any]:
@@ -273,9 +273,7 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
         id = capture_exception(exception)

         if isinstance(exception, ApiError):
-            current_app.logger.info(
-                f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}"
-            )
+            current_app.logger.info(f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}")

         organization_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG")
         project_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG")
@@ -308,9 +306,7 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
     if isinstance(exception, ApiError):
         api_exception = exception
     elif isinstance(exception, SpiffWorkflowException):
-        api_exception = ApiError.from_workflow_exception(
-            "unexpected_workflow_exception", "Unexpected Workflow Error", exception
-        )
+        api_exception = ApiError.from_workflow_exception("unexpected_workflow_exception", "Unexpected Workflow Error", exception)
     else:
         api_exception = ApiError(
             error_code=error_code,
@@ -106,6 +106,9 @@ from spiffworkflow_backend.models.user_property import (
 from spiffworkflow_backend.models.service_account import (
     ServiceAccountModel,
 )  # noqa: F401
+from spiffworkflow_backend.models.future_task import (
+    FutureTaskModel,
+)  # noqa: F401
 from spiffworkflow_backend.models.feature_flag import (
     FeatureFlagModel,
 )  # noqa: F401
@@ -70,9 +70,7 @@ class SpiffworkflowBaseDBModel(db.Model):  # type: ignore
             raise


-def update_created_modified_on_create_listener(
-    mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
-) -> None:
+def update_created_modified_on_create_listener(mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel) -> None:
     """Event listener that runs before a record is updated, and sets the create/modified field accordingly."""
     if "created_at_in_seconds" in mapper.columns.keys():
         target.created_at_in_seconds = round(time.time())
@@ -80,9 +78,7 @@ def update_created_modified_on_create_listener(
         target.updated_at_in_seconds = round(time.time())


-def update_modified_on_update_listener(
-    mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
-) -> None:
+def update_modified_on_update_listener(mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel) -> None:
     """Event listener that runs before a record is updated, and sets the modified field accordingly."""
     if "updated_at_in_seconds" in mapper.columns.keys():
         if db.session.is_modified(target, include_collections=False):
@@ -0,0 +1,48 @@
+import time
+from dataclasses import dataclass
+
+from flask import current_app
+from sqlalchemy.dialects.mysql import insert as mysql_insert
+from sqlalchemy.dialects.postgresql import insert as postgres_insert
+from sqlalchemy.dialects.sqlite import insert as sqlite_insert
+
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.db import db
+
+
+@dataclass
+class FutureTaskModel(SpiffworkflowBaseDBModel):
+    __tablename__ = "future_task"
+
+    guid: str = db.Column(db.String(36), primary_key=True)
+    run_at_in_seconds: int = db.Column(db.Integer, nullable=False, index=True)
+    completed: bool = db.Column(db.Boolean, default=False, nullable=False, index=True)
+
+    updated_at_in_seconds: int = db.Column(db.Integer, nullable=False)
+
+    @classmethod
+    def insert_or_update(cls, guid: str, run_at_in_seconds: int) -> None:
+        task_info = [
+            {
+                "guid": guid,
+                "run_at_in_seconds": run_at_in_seconds,
+                "updated_at_in_seconds": round(time.time()),
+            }
+        ]
+        on_duplicate_key_stmt = None
+        if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
+            insert_stmt = mysql_insert(FutureTaskModel).values(task_info)
+            on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+                run_at_in_seconds=insert_stmt.inserted.run_at_in_seconds, updated_at_in_seconds=round(time.time())
+            )
+        else:
+            insert_stmt = None
+            if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "sqlite":
+                insert_stmt = sqlite_insert(FutureTaskModel).values(task_info)
+            else:
+                insert_stmt = postgres_insert(FutureTaskModel).values(task_info)
+            on_duplicate_key_stmt = insert_stmt.on_conflict_do_update(
+                index_elements=["guid"],
+                set_={"run_at_in_seconds": run_at_in_seconds, "updated_at_in_seconds": round(time.time())},
+            )
+        db.session.execute(on_duplicate_key_stmt)
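The new future_task table is the queue of timer-style work the background processor drains, with a dialect-specific upsert so a rescheduled timer simply updates its run-at time. A minimal usage sketch, assuming the FutureTaskModel added above; the schedule_timer_task helper and the one-hour delay are illustrative, and committing the session is assumed to be the caller's job since insert_or_update only executes the statement:

import time

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.future_task import FutureTaskModel


def schedule_timer_task(task_guid: str, delay_in_seconds: int = 3600) -> None:
    # record (or reschedule) the task so the background processor / celery producer can pick it up later
    FutureTaskModel.insert_or_update(guid=task_guid, run_at_in_seconds=round(time.time()) + delay_in_seconds)
    db.session.commit()  # insert_or_update only issues the statement; the caller decides when to commit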
@@ -24,9 +24,7 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
     __tablename__ = "human_task"

     id: int = db.Column(db.Integer, primary_key=True)
-    process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
-    )
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True)  # type: ignore
     lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id), index=True)
     completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True, index=True)  # type: ignore
@@ -61,9 +61,7 @@ class JsonDataModel(SpiffworkflowBaseDBModel):
         return cls.find_object_by_hash(hash).data

     @classmethod
-    def insert_or_update_json_data_records(
-        cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict]
-    ) -> None:
+    def insert_or_update_json_data_records(cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict]) -> None:
         list_of_dicts = [*json_data_hash_to_json_data_dict_mapping.values()]
         if len(list_of_dicts) > 0:
             on_duplicate_key_stmt = None
@@ -51,15 +51,11 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
     status: str = db.Column(db.String(20), nullable=False, default="ready", index=True)
     user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True, index=True)  # type: ignore
     user = relationship("UserModel")
-    counterpart_id: int = db.Column(
-        db.Integer
-    )  # Not enforcing self-referential foreign key so we can delete messages.
+    counterpart_id: int = db.Column(db.Integer)  # Not enforcing self-referential foreign key so we can delete messages.
     failure_cause: str = db.Column(db.Text())
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
-    correlation_rules = relationship(
-        "MessageInstanceCorrelationRuleModel", back_populates="message_instance", cascade="delete"
-    )
+    correlation_rules = relationship("MessageInstanceCorrelationRuleModel", back_populates="message_instance", cascade="delete")

     @validates("message_type")
     def validate_message_type(self, key: str, value: Any) -> Any:
@@ -66,12 +66,8 @@ class ProcessGroupSchema(Schema):
         "process_groups",
     ]

-    process_models = marshmallow.fields.List(
-        marshmallow.fields.Nested("ProcessModelInfoSchema", dump_only=True, required=False)
-    )
-    process_groups = marshmallow.fields.List(
-        marshmallow.fields.Nested("ProcessGroupSchema", dump_only=True, required=False)
-    )
+    process_models = marshmallow.fields.List(marshmallow.fields.Nested("ProcessModelInfoSchema", dump_only=True, required=False))
+    process_groups = marshmallow.fields.List(marshmallow.fields.Nested("ProcessGroupSchema", dump_only=True, required=False))

     @post_load
     def make_process_group(self, data: dict[str, str | bool | int], **kwargs: dict) -> ProcessGroup:
@@ -30,13 +30,14 @@ class ProcessInstanceCannotBeDeletedError(Exception):


 class ProcessInstanceStatus(SpiffEnum):
-    not_started = "not_started"
-    user_input_required = "user_input_required"
-    waiting = "waiting"
     complete = "complete"
     error = "error"
+    not_started = "not_started"
+    running = "running"
     suspended = "suspended"
     terminated = "terminated"
+    user_input_required = "user_input_required"
+    waiting = "waiting"


 class ProcessInstanceModel(SpiffworkflowBaseDBModel):

@@ -58,9 +59,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):

     active_human_tasks = relationship(
         "HumanTaskModel",
-        primaryjoin=(
-            "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)"
-        ),
+        primaryjoin="and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)",
     )  # type: ignore

     bpmn_process = relationship(BpmnProcessModel, cascade="delete")

@@ -103,10 +102,13 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     # full, none
     persistence_level: str = "full"

+    actions: dict | None = None
+
     def serialized(self) -> dict[str, Any]:
         """Return object data in serializeable format."""
         return {
             "id": self.id,
+            "actions": self.actions,
             "bpmn_version_control_identifier": self.bpmn_version_control_identifier,
             "bpmn_version_control_type": self.bpmn_version_control_type,
             "bpmn_xml_file_contents_retrieval_error": self.bpmn_xml_file_contents_retrieval_error,

@@ -127,9 +129,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     def serialized_with_metadata(self) -> dict[str, Any]:
         process_instance_attributes = self.serialized()
         process_instance_attributes["process_metadata"] = self.process_metadata
-        process_instance_attributes["process_model_with_diagram_identifier"] = (
-            self.process_model_with_diagram_identifier
-        )
+        process_instance_attributes["process_model_with_diagram_identifier"] = self.process_model_with_diagram_identifier
         return process_instance_attributes

     @validates("status")

@@ -146,6 +146,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     def has_terminal_status(self) -> bool:
         return self.status in self.terminal_statuses()

+    def is_immediately_runnable(self) -> bool:
+        return self.status in self.immediately_runnable_statuses()
+
     @classmethod
     def terminal_statuses(cls) -> list[str]:
         return ["complete", "error", "terminated"]

@@ -157,7 +160,11 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):

     @classmethod
     def active_statuses(cls) -> list[str]:
-        return ["not_started", "user_input_required", "waiting"]
+        return cls.immediately_runnable_statuses() + ["user_input_required", "waiting"]
+
+    @classmethod
+    def immediately_runnable_statuses(cls) -> list[str]:
+        return ["not_started", "running"]


 class ProcessInstanceModelSchema(Schema):
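With "running" added to the status enum and the active/immediately-runnable split above, callers can tell instances a celery worker may pick up right away from instances that are merely open. A small sketch under those assumptions; the maybe_queue helper is illustrative, while queue_process_instance_if_appropriate is the producer imported elsewhere in this commit:

from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
    queue_process_instance_if_appropriate,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


def maybe_queue(process_instance: ProcessInstanceModel) -> None:
    # "not_started" and "running" instances can be handed straight to a worker
    if process_instance.is_immediately_runnable():
        queue_process_instance_if_appropriate(process_instance)
    # "user_input_required" and "waiting" are still active, but blocked on a person or an event
    elif process_instance.status in ProcessInstanceModel.active_statuses():
        pass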
@@ -13,9 +13,7 @@ class ProcessInstanceFileDataModel(SpiffworkflowBaseDBModel):
     __tablename__ = "process_instance_file_data"

     id: int = db.Column(db.Integer, primary_key=True)
-    process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
-    )
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True)  # type: ignore
     identifier: str = db.Column(db.String(255), nullable=False)
     list_index: int | None = db.Column(db.Integer, nullable=True)
     mimetype: str = db.Column(db.String(255), nullable=False)
@@ -13,9 +13,7 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel):
     __table_args__ = (db.UniqueConstraint("process_instance_id", "key", name="process_instance_metadata_unique"),)

     id: int = db.Column(db.Integer, primary_key=True)
-    process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
-    )
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True)  # type: ignore
     key: str = db.Column(db.String(255), nullable=False, index=True)
     value: str = db.Column(db.String(255), nullable=False)
@@ -12,9 +12,7 @@ class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel):
     __tablename__ = "process_instance_queue"

     id: int = db.Column(db.Integer, primary_key=True)
-    process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), unique=True, nullable=False  # type: ignore
-    )
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), unique=True, nullable=False)  # type: ignore
     priority: int = db.Column(db.Integer)
     locked_by: str | None = db.Column(db.String(80), index=True, nullable=True)
     locked_at_in_seconds: int | None = db.Column(db.Integer, index=True, nullable=True)
@@ -110,9 +110,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
         ).first()

         if process_instance_report is not None:
-            raise ProcessInstanceReportAlreadyExistsError(
-                f"Process instance report with identifier already exists: {identifier}"
-            )
+            raise ProcessInstanceReportAlreadyExistsError(f"Process instance report with identifier already exists: {identifier}")

         report_metadata_dict = typing.cast(dict[str, Any], report_metadata)
         json_data_hash = JsonDataModel.create_and_insert_json_data_from_dict(report_metadata_dict)
@@ -64,9 +64,7 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):
     """A cache of information about all the Processes and Decisions defined in all files."""

     __tablename__ = "reference_cache"
-    __table_args__ = (
-        UniqueConstraint("generation_id", "identifier", "relative_location", "type", name="reference_cache_uniq"),
-    )
+    __table_args__ = (UniqueConstraint("generation_id", "identifier", "relative_location", "type", name="reference_cache_uniq"),)

     id: int = db.Column(db.Integer, primary_key=True)
     generation_id: int = db.Column(ForeignKey(CacheGenerationModel.id), nullable=False, index=True)  # type: ignore
@@ -155,6 +155,7 @@ class Task:
         error_message: str | None = None,
         assigned_user_group_identifier: str | None = None,
         potential_owner_usernames: str | None = None,
+        process_model_uses_queued_execution: bool | None = None,
     ):
         self.id = id
         self.name = name

@@ -167,6 +168,7 @@ class Task:
         self.lane = lane
         self.parent = parent
         self.event_definition = event_definition
+        self.process_model_uses_queued_execution = process_model_uses_queued_execution

         self.data = data
         if self.data is None:

@@ -228,6 +230,7 @@ class Task:
             "error_message": self.error_message,
             "assigned_user_group_identifier": self.assigned_user_group_identifier,
             "potential_owner_usernames": self.potential_owner_usernames,
+            "process_model_uses_queued_execution": self.process_model_uses_queued_execution,
         }

     @classmethod
@@ -39,9 +39,7 @@ class TaskDraftDataModel(SpiffworkflowBaseDBModel):
         ),
     )

-    process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
-    )
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True)  # type: ignore

     # a colon delimited path of bpmn_process_definition_ids for a given task
     task_definition_id_path: str = db.Column(db.String(255), nullable=False, index=True)
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import time
+from dataclasses import dataclass
+
+from flask import current_app
+from sqlalchemy import ForeignKey
+from sqlalchemy import desc
+from sqlalchemy.dialects.mysql import insert as mysql_insert
+from sqlalchemy.dialects.postgresql import insert as postgres_insert
+
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.db import db
+
+
+@dataclass
+class TaskInstructionsForEndUserModel(SpiffworkflowBaseDBModel):
+    __tablename__ = "task_instructions_for_end_user"
+
+    task_guid: str = db.Column(db.String(36), primary_key=True)
+    instruction: str = db.Column(db.Text(), nullable=False)
+    process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False, index=True)
+    has_been_retrieved: bool = db.Column(db.Boolean, nullable=False, default=False, index=True)
+
+    # we need this to maintain order
+    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False, index=True)
+
+    @classmethod
+    def insert_or_update_record(cls, task_guid: str, process_instance_id: int, instruction: str) -> None:
+        record = [
+            {
+                "task_guid": task_guid,
+                "process_instance_id": process_instance_id,
+                "instruction": instruction,
+                "timestamp": time.time(),
+            }
+        ]
+        on_duplicate_key_stmt = None
+        if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
+            insert_stmt = mysql_insert(TaskInstructionsForEndUserModel).values(record)
+            on_duplicate_key_stmt = insert_stmt.prefix_with("IGNORE")
+            # on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(instruction=insert_stmt.inserted.instruction)
+        else:
+            insert_stmt = postgres_insert(TaskInstructionsForEndUserModel).values(record)
+            on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(index_elements=["task_guid"])
+        db.session.execute(on_duplicate_key_stmt)
+
+    @classmethod
+    def entries_for_process_instance(cls, process_instance_id: int) -> list[TaskInstructionsForEndUserModel]:
+        entries: list[TaskInstructionsForEndUserModel] = (
+            cls.query.filter_by(process_instance_id=process_instance_id, has_been_retrieved=False)
+            .order_by(desc(TaskInstructionsForEndUserModel.timestamp))  # type: ignore
+            .all()
+        )
+        return entries
+
+    @classmethod
+    def retrieve_and_clear(cls, process_instance_id: int) -> list[TaskInstructionsForEndUserModel]:
+        entries = cls.entries_for_process_instance(process_instance_id)
+        # convert to list[dict] here so we can remove the records from the db right after
+        for e in entries:
+            e.has_been_retrieved = True
+            db.session.add(e)
+        db.session.commit()
+        return entries
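This table is how user-facing instructions reach the progress page: rows are written once per task guid (duplicate inserts are ignored) and handed out a single time. A rough sketch of an endpoint draining it, assuming the model above; the route function and response shape are illustrative and not part of this commit:

from flask import jsonify
from flask import make_response
from flask.wrappers import Response

from spiffworkflow_backend.models.task_instructions_for_end_user import TaskInstructionsForEndUserModel


def process_instance_progress(process_instance_id: int) -> Response:
    # each queued instruction is returned once, newest first, and flagged as retrieved
    entries = TaskInstructionsForEndUserModel.retrieve_and_clear(process_instance_id)
    return make_response(jsonify({"instructions": [e.instruction for e in entries]}), 200)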
@@ -13,9 +13,7 @@ from spiffworkflow_backend.models.user import UserModel


 def active_user_updates(last_visited_identifier: str) -> Response:
-    active_user = ActiveUserModel.query.filter_by(
-        user_id=g.user.id, last_visited_identifier=last_visited_identifier
-    ).first()
+    active_user = ActiveUserModel.query.filter_by(user_id=g.user.id, last_visited_identifier=last_visited_identifier).first()
     if active_user is None:
         active_user = ActiveUserModel(
             user_id=g.user.id, last_visited_identifier=last_visited_identifier, last_seen_in_seconds=round(time.time())
@@ -39,9 +37,7 @@ def active_user_updates(last_visited_identifier: str) -> Response:


 def active_user_unregister(last_visited_identifier: str) -> flask.wrappers.Response:
-    active_user = ActiveUserModel.query.filter_by(
-        user_id=g.user.id, last_visited_identifier=last_visited_identifier
-    ).first()
+    active_user = ActiveUserModel.query.filter_by(user_id=g.user.id, last_visited_identifier=last_visited_identifier).first()
     if active_user is not None:
         db.session.delete(active_user)
         db.session.commit()
@@ -136,16 +136,12 @@ def login_return(code: str, state: str, session_state: str = "") -> Response | N
     state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
     state_redirect_url = state_dict["redirect_url"]
     authentication_identifier = state_dict["authentication_identifier"]
-    auth_token_object = AuthenticationService().get_auth_token_object(
-        code, authentication_identifier=authentication_identifier
-    )
+    auth_token_object = AuthenticationService().get_auth_token_object(code, authentication_identifier=authentication_identifier)
     if "id_token" in auth_token_object:
         id_token = auth_token_object["id_token"]
         user_info = _parse_id_token(id_token)

-        if AuthenticationService.validate_id_or_access_token(
-            id_token, authentication_identifier=authentication_identifier
-        ):
+        if AuthenticationService.validate_id_or_access_token(id_token, authentication_identifier=authentication_identifier):
             if user_info and "error" not in user_info:
                 user_model = AuthorizationService.create_user_from_sign_in(user_info)
                 g.user = user_model.id
@@ -180,9 +176,7 @@ def login_return(code: str, state: str, session_state: str = "") -> Response | N
 def login_with_access_token(access_token: str, authentication_identifier: str) -> Response:
     user_info = _parse_id_token(access_token)

-    if AuthenticationService.validate_id_or_access_token(
-        access_token, authentication_identifier=authentication_identifier
-    ):
+    if AuthenticationService.validate_id_or_access_token(access_token, authentication_identifier=authentication_identifier):
         if user_info and "error" not in user_info:
             AuthorizationService.create_user_from_sign_in(user_info)
     else:
@@ -262,9 +256,7 @@ def _set_new_access_token_in_cookie(
         response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie)

     if hasattr(tld, "new_authentication_identifier") and tld.new_authentication_identifier:
-        response.set_cookie(
-            "authentication_identifier", tld.new_authentication_identifier, domain=domain_for_frontend_cookie
-        )
+        response.set_cookie("authentication_identifier", tld.new_authentication_identifier, domain=domain_for_frontend_cookie)

     if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out:
         response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie)
@@ -347,9 +339,7 @@ def _get_user_model_from_token(token: str) -> UserModel | None:
         try:
             user_model = _get_user_from_decoded_internal_token(decoded_token)
         except Exception as e:
-            current_app.logger.error(
-                f"Exception in verify_token getting user from decoded internal token. {e}"
-            )
+            current_app.logger.error(f"Exception in verify_token getting user from decoded internal token. {e}")

         # if the user is forced logged out then stop processing the token
         if _force_logout_user_if_necessary(user_model):
@@ -359,9 +349,7 @@ def _get_user_model_from_token(token: str) -> UserModel | None:
         user_info = None
         authentication_identifier = _get_authentication_identifier_from_request()
         try:
-            if AuthenticationService.validate_id_or_access_token(
-                token, authentication_identifier=authentication_identifier
-            ):
+            if AuthenticationService.validate_id_or_access_token(token, authentication_identifier=authentication_identifier):
                 user_info = decoded_token
         except TokenExpiredError as token_expired_error:
             # Try to refresh the token
@@ -437,9 +425,7 @@ def _get_user_from_decoded_internal_token(decoded_token: dict) -> UserModel | No
     parts = sub.split("::")
     service = parts[0].split(":")[1]
     service_id = parts[1].split(":")[1]
-    user: UserModel = (
-        UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
-    )
+    user: UserModel = UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
     if user:
         return user
     user = UserService.create_user(service_id, service, service_id)
@@ -43,9 +43,7 @@ def _build_response(data_store_class: Any, name: str, page: int, per_page: int)
     return make_response(jsonify(response_json), 200)


-def data_store_item_list(
-    data_store_type: str, name: str, page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
+def data_store_item_list(data_store_type: str, name: str, page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
     """Returns a list of the items in a data store."""

     if data_store_type == "typeahead":
@@ -3,8 +3,8 @@ from flask import make_response
 from flask import request
 from flask.wrappers import Response

-from spiffworkflow_backend import get_version_info_data
 from spiffworkflow_backend.services.authentication_service import AuthenticationService
+from spiffworkflow_backend.services.monitoring_service import get_version_info_data


 def test_raise_error() -> Response:
@@ -189,9 +189,7 @@ def _run_extension(

     if ui_schema_action:
         if "results_markdown_filename" in ui_schema_action:
-            file_contents = SpecFileService.get_data(
-                process_model, ui_schema_action["results_markdown_filename"]
-            ).decode("utf-8")
+            file_contents = SpecFileService.get_data(process_model, ui_schema_action["results_markdown_filename"]).decode("utf-8")
             form_contents = JinjaService.render_jinja_template(file_contents, task_data=task_data)
             result["rendered_results_markdown"] = form_contents
@@ -87,9 +87,7 @@ def token() -> Response | dict:
     code = request.values.get("code")

     if code is None:
-        return Response(
-            json.dumps({"error": "missing_code_value_in_token_request"}), status=400, mimetype="application/json"
-        )
+        return Response(json.dumps({"error": "missing_code_value_in_token_request"}), status=400, mimetype="application/json")

     """We just stuffed the user name on the front of the code, so grab it."""
     user_name, secret_hash = code.split(":")
@@ -9,10 +9,14 @@ from flask import jsonify
 from flask import make_response
 from flask import request
 from flask.wrappers import Response
+from sqlalchemy import and_
+from sqlalchemy import or_

 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.process_entity_not_found_error import ProcessEntityNotFoundError
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
 from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
 from spiffworkflow_backend.models.principal import PrincipalModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

@@ -265,3 +269,55 @@ def _find_principal_or_raise() -> PrincipalModel:
         )
     )
     return principal  # type: ignore
+
+
+def _find_process_instance_for_me_or_raise(
+    process_instance_id: int,
+    include_actions: bool = False,
+) -> ProcessInstanceModel:
+    process_instance: ProcessInstanceModel | None = (
+        ProcessInstanceModel.query.filter_by(id=process_instance_id)
+        .outerjoin(HumanTaskModel)
+        .outerjoin(
+            HumanTaskUserModel,
+            and_(
+                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+                HumanTaskUserModel.user_id == g.user.id,
+            ),
+        )
+        .filter(
+            or_(
+                # you were allowed to complete it
+                HumanTaskUserModel.id.is_not(None),
+                # or you completed it (which admins can do even if it wasn't assigned via HumanTaskUserModel)
+                HumanTaskModel.completed_by_user_id == g.user.id,
+                # or you started it
+                ProcessInstanceModel.process_initiator_id == g.user.id,
+            )
+        )
+        .first()
+    )
+
+    if process_instance is None:
+        raise (
+            ApiError(
+                error_code="process_instance_cannot_be_found",
+                message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.",
+                status_code=400,
+            )
+        )
+
+    if include_actions:
+        modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(
+            process_instance.process_model_identifier
+        )
+        target_uri = f"/v1.0/process-instances/for-me/{modified_process_model_identifier}/{process_instance.id}"
+        has_permission = AuthorizationService.user_has_permission(
+            user=g.user,
+            permission="read",
+            target_uri=target_uri,
+        )
+        if has_permission:
+            process_instance.actions = {"read": {"path": target_uri, "method": "GET"}}
+
+    return process_instance
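The new helper above answers "does this process instance belong to the current user" by outer-joining human task assignments and checking the initiator, and can attach an actions link when the user has read permission on the for-me URI. A short sketch of a caller, assuming it is acceptable to import the underscore-prefixed helper as the process instances controller does elsewhere in this commit; the endpoint itself is illustrative:

from flask import jsonify
from flask import make_response
from flask.wrappers import Response

from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_for_me_or_raise


def process_instance_show_for_me(modified_process_model_identifier: str, process_instance_id: int) -> Response:
    # raises a 400 ApiError unless the current user started the instance, was assigned a task, or completed one
    process_instance = _find_process_instance_for_me_or_raise(process_instance_id, include_actions=True)
    # serialized() now carries the "actions" entry populated when the user has read permission on the for-me URI
    return make_response(jsonify(process_instance.serialized_with_metadata()), 200)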
@@ -118,7 +118,5 @@ def process_group_show(
 def process_group_move(modified_process_group_identifier: str, new_location: str) -> flask.wrappers.Response:
     original_process_group_id = _un_modify_modified_process_model_id(modified_process_group_identifier)
     new_process_group = ProcessModelService.process_group_move(original_process_group_id, new_location)
-    _commit_and_push_to_git(
-        f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
-    )
+    _commit_and_push_to_git(f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}")
     return make_response(jsonify(new_process_group), 200)
@@ -69,9 +69,7 @@ def log_list(
         log_query = log_query.filter(ProcessInstanceEventModel.event_type == event_type)

     logs = (
-        log_query.order_by(
-            ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc()  # type: ignore
-        )
+        log_query.order_by(ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc())  # type: ignore
         .outerjoin(UserModel, UserModel.id == ProcessInstanceEventModel.user_id)
         .add_columns(
             TaskModel.guid.label("spiff_task_guid"),  # type: ignore
@@ -1,4 +1,6 @@
-"""APIs for dealing with process groups, process models, and process instances."""
+# black and ruff are in competition with each other in import formatting so ignore ruff
+# ruff: noqa: I001
+
 import json
 from typing import Any
@@ -8,17 +10,20 @@ from flask import g
 from flask import jsonify
 from flask import make_response
 from flask.wrappers import Response
-from sqlalchemy import and_
 from sqlalchemy import or_
 from sqlalchemy.orm import aliased

+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_enabled_for_process_model,
+)
+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_process_instance_if_appropriate,
+)
 from spiffworkflow_backend.data_migrations.process_instance_migrator import ProcessInstanceMigrator
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
 from spiffworkflow_backend.models.db import db
-from spiffworkflow_backend.models.human_task import HumanTaskModel
-from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
 from spiffworkflow_backend.models.json_data import JsonDataModel  # noqa: F401
 from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
 from spiffworkflow_backend.models.process_instance import ProcessInstanceCannotBeDeletedError
@@ -33,6 +38,7 @@ from spiffworkflow_backend.models.reference_cache import ReferenceNotFoundError
 from spiffworkflow_backend.models.task import TaskModel
 from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
 from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_by_id_or_raise
+from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_for_me_or_raise
 from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
 from spiffworkflow_backend.routes.process_api_blueprint import _un_modify_modified_process_model_id
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -49,30 +55,6 @@ from spiffworkflow_backend.services.process_instance_service import ProcessInsta
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.task_service import TaskService

-# from spiffworkflow_backend.services.process_instance_report_service import (
-#     ProcessInstanceReportFilter,
-# )
-
-
-def _process_instance_create(
-    process_model_identifier: str,
-) -> ProcessInstanceModel:
-    process_model = _get_process_model(process_model_identifier)
-    if process_model.primary_file_name is None:
-        raise ApiError(
-            error_code="process_model_missing_primary_bpmn_file",
-            message=(
-                f"Process Model '{process_model_identifier}' does not have a primary"
-                " bpmn file. One must be set in order to instantiate this model."
-            ),
-            status_code=400,
-        )
-
-    process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
-        process_model_identifier, g.user
-    )
-    return process_instance
-
-
 def process_instance_create(
     modified_process_model_identifier: str,
@@ -87,55 +69,18 @@ def process_instance_create(
     )


-def _process_instance_run(
-    process_instance: ProcessInstanceModel,
-) -> None:
-    if process_instance.status != "not_started":
-        raise ApiError(
-            error_code="process_instance_not_runnable",
-            message=f"Process Instance ({process_instance.id}) is currently running or has already run.",
-            status_code=400,
-        )
-
-    processor = None
-    try:
-        if not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance):
-            processor = ProcessInstanceService.run_process_instance_with_processor(process_instance)
-    except (
-        ApiError,
-        ProcessInstanceIsNotEnqueuedError,
-        ProcessInstanceIsAlreadyLockedError,
-    ) as e:
-        ErrorHandlingService.handle_error(process_instance, e)
-        raise e
-    except Exception as e:
-        ErrorHandlingService.handle_error(process_instance, e)
-        # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes.
-        # we need to recurse through all last tasks if the last task is a call activity or subprocess.
-        if processor is not None:
-            task = processor.bpmn_process_instance.last_task
-            raise ApiError.from_task(
-                error_code="unknown_exception",
-                message=f"An unknown error occurred. Original error: {e}",
-                status_code=400,
-                task=task,
-            ) from e
-        raise e
-
-    if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP"]:
-        MessageService.correlate_all_message_instances()
-
-
 def process_instance_run(
     modified_process_model_identifier: str,
     process_instance_id: int,
+    force_run: bool = False,
 ) -> flask.wrappers.Response:
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
-    _process_instance_run(process_instance)
+    _process_instance_run(process_instance, force_run=force_run)

     process_instance_api = ProcessInstanceService.processor_to_process_instance_api(process_instance)
-    process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
-    return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json")
+    process_instance_api_dict = ProcessInstanceApiSchema().dump(process_instance_api)
+    process_instance_api_dict["process_model_uses_queued_execution"] = queue_enabled_for_process_model(process_instance)
+    return Response(json.dumps(process_instance_api_dict), status=200, mimetype="application/json")


 def process_instance_terminate(
@@ -189,6 +134,9 @@ def process_instance_resume(
     try:
         with ProcessInstanceQueueService.dequeued(process_instance):
             processor.resume()
+        # the process instance will be in waiting since we just successfully resumed it.
+        # tell the celery worker to get busy.
+        queue_process_instance_if_appropriate(process_instance)
     except (
         ProcessInstanceIsNotEnqueuedError,
         ProcessInstanceIsAlreadyLockedError,
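Note on the `queue_process_instance_if_appropriate` call added above: the helper lives in the new `background_processing.celery_tasks.process_instance_task_producer` module, which is not part of this hunk. The following is only a rough sketch of what such a producer could look like; the config key and the celery task name are assumptions, not the module's actual contents.

```python
# Illustrative sketch only; the real producer module is not shown in this diff.
# SPIFFWORKFLOW_BACKEND_CELERY_ENABLED and the task name below are assumed.
from celery import current_app as celery_app
from flask import current_app

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


def queue_enabled_for_process_model(process_instance: ProcessInstanceModel) -> bool:
    # assumed gate: queued execution is on when celery is enabled for the backend
    return current_app.config.get("SPIFFWORKFLOW_BACKEND_CELERY_ENABLED", False) is True


def queue_process_instance_if_appropriate(process_instance: ProcessInstanceModel) -> bool:
    # hand the instance to a celery worker only when queued execution is enabled;
    # callers fall back to running the instance in-process otherwise
    if queue_enabled_for_process_model(process_instance):
        celery_app.send_task(
            "spiffworkflow_backend.celery_single_process_instance_run",  # hypothetical name
            args=[process_instance.id],
        )
        return True
    return False
```

Either way, the resume endpoint itself stays synchronous; it only nudges the worker after the lock is released.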
@@ -245,10 +193,7 @@ def process_instance_report_show(
     if report_hash is None and report_id is None and report_identifier is None:
         raise ApiError(
             error_code="report_key_missing",
-            message=(
-                "A report key is needed to lookup a report. Either choose a report_hash, report_id, or"
-                " report_identifier."
-            ),
+            message="A report key is needed to lookup a report. Either choose a report_hash, report_id, or report_identifier.",
         )
     response_result: Report | ProcessInstanceReportModel | None = None
     if report_hash is not None:
@@ -275,9 +220,7 @@ def process_instance_report_column_list(
 ) -> flask.wrappers.Response:
     table_columns = ProcessInstanceReportService.builtin_column_options()
     system_report_column_options = ProcessInstanceReportService.system_report_column_options()
-    columns_for_metadata_strings = ProcessInstanceReportService.process_instance_metadata_as_columns(
-        process_model_identifier
-    )
+    columns_for_metadata_strings = ProcessInstanceReportService.process_instance_metadata_as_columns(process_model_identifier)
     return make_response(jsonify(table_columns + system_report_column_options + columns_for_metadata_strings), 200)

|
@ -307,9 +250,7 @@ def process_instance_show(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def process_instance_delete(
|
def process_instance_delete(process_instance_id: int, modified_process_model_identifier: str) -> flask.wrappers.Response:
|
||||||
process_instance_id: int, modified_process_model_identifier: str
|
|
||||||
) -> flask.wrappers.Response:
|
|
||||||
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
|
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
|
||||||
|
|
||||||
if not process_instance.has_terminal_status():
|
if not process_instance.has_terminal_status():
|
||||||
|
@ -433,8 +374,7 @@ def process_instance_task_list(
|
||||||
raise ApiError(
|
raise ApiError(
|
||||||
error_code="bpmn_process_not_found",
|
error_code="bpmn_process_not_found",
|
||||||
message=(
|
message=(
|
||||||
f"Cannot find a bpmn process with guid '{bpmn_process_guid}' for process instance"
|
f"Cannot find a bpmn process with guid '{bpmn_process_guid}' for process instance '{process_instance.id}'"
|
||||||
f" '{process_instance.id}'"
|
|
||||||
),
|
),
|
||||||
status_code=400,
|
status_code=400,
|
||||||
)
|
)
|
||||||
|
@ -473,9 +413,7 @@ def process_instance_task_list(
|
||||||
task_models_of_parent_bpmn_processes,
|
task_models_of_parent_bpmn_processes,
|
||||||
) = TaskService.task_models_of_parent_bpmn_processes(to_task_model)
|
) = TaskService.task_models_of_parent_bpmn_processes(to_task_model)
|
||||||
task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
|
task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
|
||||||
if to_task_model.runtime_info and (
|
if to_task_model.runtime_info and ("instance" in to_task_model.runtime_info or "iteration" in to_task_model.runtime_info):
|
||||||
"instance" in to_task_model.runtime_info or "iteration" in to_task_model.runtime_info
|
|
||||||
):
|
|
||||||
to_task_model_parent = [to_task_model.properties_json["parent"]]
|
to_task_model_parent = [to_task_model.properties_json["parent"]]
|
||||||
else:
|
else:
|
||||||
to_task_model_parent = []
|
to_task_model_parent = []
|
||||||
|
@ -500,8 +438,7 @@ def process_instance_task_list(
|
||||||
)
|
)
|
||||||
.outerjoin(
|
.outerjoin(
|
||||||
direct_parent_bpmn_process_definition_alias,
|
direct_parent_bpmn_process_definition_alias,
|
||||||
direct_parent_bpmn_process_definition_alias.id
|
direct_parent_bpmn_process_definition_alias.id == direct_parent_bpmn_process_alias.bpmn_process_definition_id,
|
||||||
== direct_parent_bpmn_process_alias.bpmn_process_definition_id,
|
|
||||||
)
|
)
|
||||||
.join(
|
.join(
|
||||||
BpmnProcessDefinitionModel,
|
BpmnProcessDefinitionModel,
|
||||||
|
@ -554,9 +491,7 @@ def process_instance_task_list(
|
||||||
most_recent_tasks[row_key] = task_model
|
most_recent_tasks[row_key] = task_model
|
||||||
if task_model.typename in ["SubWorkflowTask", "CallActivity"]:
|
if task_model.typename in ["SubWorkflowTask", "CallActivity"]:
|
||||||
relevant_subprocess_guids.add(task_model.guid)
|
relevant_subprocess_guids.add(task_model.guid)
|
||||||
elif task_model.runtime_info and (
|
elif task_model.runtime_info and ("instance" in task_model.runtime_info or "iteration" in task_model.runtime_info):
|
||||||
"instance" in task_model.runtime_info or "iteration" in task_model.runtime_info
|
|
||||||
):
|
|
||||||
# This handles adding all instances of a MI and iterations of loop tasks
|
# This handles adding all instances of a MI and iterations of loop tasks
|
||||||
additional_tasks.append(task_model)
|
additional_tasks.append(task_model)
|
||||||
|
|
||||||
|
@ -573,9 +508,7 @@ def process_instance_task_list(
|
||||||
if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED":
|
if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED":
|
||||||
TaskService.reset_task_model_dict(task_model, state="READY")
|
TaskService.reset_task_model_dict(task_model, state="READY")
|
||||||
elif (
|
elif (
|
||||||
end_in_seconds is None
|
end_in_seconds is None or to_task_model.end_in_seconds is None or to_task_model.end_in_seconds < end_in_seconds
|
||||||
or to_task_model.end_in_seconds is None
|
|
||||||
or to_task_model.end_in_seconds < end_in_seconds
|
|
||||||
) and task_model["guid"] in task_models_of_parent_bpmn_processes_guids:
|
) and task_model["guid"] in task_models_of_parent_bpmn_processes_guids:
|
||||||
TaskService.reset_task_model_dict(task_model, state="WAITING")
|
TaskService.reset_task_model_dict(task_model, state="WAITING")
|
||||||
return make_response(jsonify(task_models_dict), 200)
|
return make_response(jsonify(task_models_dict), 200)
|
||||||
|
@ -672,9 +605,7 @@ def _get_process_instance(
|
||||||
process_model_with_diagram = None
|
process_model_with_diagram = None
|
||||||
name_of_file_with_diagram = None
|
name_of_file_with_diagram = None
|
||||||
if process_identifier:
|
if process_identifier:
|
||||||
spec_reference = (
|
spec_reference = ReferenceCacheModel.basic_query().filter_by(identifier=process_identifier, type="process").first()
|
||||||
ReferenceCacheModel.basic_query().filter_by(identifier=process_identifier, type="process").first()
|
|
||||||
)
|
|
||||||
if spec_reference is None:
|
if spec_reference is None:
|
||||||
raise ReferenceNotFoundError(f"Could not find given process identifier in the cache: {process_identifier}")
|
raise ReferenceNotFoundError(f"Could not find given process identifier in the cache: {process_identifier}")
|
||||||
|
|
||||||
|
@@ -702,39 +633,64 @@ def _get_process_instance(
     return make_response(jsonify(process_instance_as_dict), 200)


-def _find_process_instance_for_me_or_raise(
-    process_instance_id: int,
-) -> ProcessInstanceModel:
-    process_instance: ProcessInstanceModel | None = (
-        ProcessInstanceModel.query.filter_by(id=process_instance_id)
-        .outerjoin(HumanTaskModel)
-        .outerjoin(
-            HumanTaskUserModel,
-            and_(
-                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
-                HumanTaskUserModel.user_id == g.user.id,
-            ),
-        )
-        .filter(
-            or_(
-                # you were allowed to complete it
-                HumanTaskUserModel.id.is_not(None),
-                # or you completed it (which admins can do even if it wasn't assigned via HumanTaskUserModel)
-                HumanTaskModel.completed_by_user_id == g.user.id,
-                # or you started it
-                ProcessInstanceModel.process_initiator_id == g.user.id,
-            )
-        )
-        .first()
-    )
-
-    if process_instance is None:
-        raise (
-            ApiError(
-                error_code="process_instance_cannot_be_found",
-                message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.",
-                status_code=400,
-            )
-        )
-    return process_instance
+def _process_instance_run(
+    process_instance: ProcessInstanceModel,
+    force_run: bool = False,
+) -> None:
+    if process_instance.status != "not_started" and not force_run:
+        raise ApiError(
+            error_code="process_instance_not_runnable",
+            message=f"Process Instance ({process_instance.id}) is currently running or has already run.",
+            status_code=400,
+        )
+
+    processor = None
+    task_runnability = None
+    try:
+        if queue_enabled_for_process_model(process_instance):
+            queue_process_instance_if_appropriate(process_instance)
+        elif not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance):
+            processor, task_runnability = ProcessInstanceService.run_process_instance_with_processor(process_instance)
+    except (
+        ApiError,
+        ProcessInstanceIsNotEnqueuedError,
+        ProcessInstanceIsAlreadyLockedError,
+    ) as e:
+        ErrorHandlingService.handle_error(process_instance, e)
+        raise e
+    except Exception as e:
+        ErrorHandlingService.handle_error(process_instance, e)
+        # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes.
+        # we need to recurse through all last tasks if the last task is a call activity or subprocess.
+        if processor is not None:
+            task = processor.bpmn_process_instance.last_task
+            raise ApiError.from_task(
+                error_code="unknown_exception",
+                message=f"An unknown error occurred. Original error: {e}",
+                status_code=400,
+                task=task,
+            ) from e
+        raise e
+
+    if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP"]:
+        MessageService.correlate_all_message_instances()
+
+
+def _process_instance_create(
+    process_model_identifier: str,
+) -> ProcessInstanceModel:
+    process_model = _get_process_model(process_model_identifier)
+    if process_model.primary_file_name is None:
+        raise ApiError(
+            error_code="process_model_missing_primary_bpmn_file",
+            message=(
+                f"Process Model '{process_model_identifier}' does not have a primary"
+                " bpmn file. One must be set in order to instantiate this model."
+            ),
+            status_code=400,
+        )
+
+    process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
+        process_model_identifier, g.user
+    )
+    return process_instance
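A brief hedged note on the new `force_run` flag above: it lets a caller re-trigger an instance that is no longer in `not_started`, which the commit message describes as the ability to force run a process instance over the API. A hypothetical caller-side usage (the route wiring itself is not shown in this hunk):

```python
# Hypothetical usage of force_run; error handling omitted for brevity.
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
_process_instance_run(process_instance, force_run=True)  # bypasses the "not_started" guard
```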
|
|
|
@ -161,9 +161,7 @@ def process_model_show(modified_process_model_identifier: str, include_file_refe
|
||||||
# if the user got here then they can read the process model
|
# if the user got here then they can read the process model
|
||||||
available_actions = {"read": {"path": f"/process-models/{modified_process_model_identifier}", "method": "GET"}}
|
available_actions = {"read": {"path": f"/process-models/{modified_process_model_identifier}", "method": "GET"}}
|
||||||
if GitService.check_for_publish_configs(raise_on_missing=False):
|
if GitService.check_for_publish_configs(raise_on_missing=False):
|
||||||
available_actions = {
|
available_actions = {"publish": {"path": f"/process-model-publish/{modified_process_model_identifier}", "method": "POST"}}
|
||||||
"publish": {"path": f"/process-model-publish/{modified_process_model_identifier}", "method": "POST"}
|
|
||||||
}
|
|
||||||
process_model.actions = available_actions
|
process_model.actions = available_actions
|
||||||
|
|
||||||
return make_response(jsonify(process_model), 200)
|
return make_response(jsonify(process_model), 200)
|
||||||
|
@ -172,21 +170,16 @@ def process_model_show(modified_process_model_identifier: str, include_file_refe
|
||||||
def process_model_move(modified_process_model_identifier: str, new_location: str) -> flask.wrappers.Response:
|
def process_model_move(modified_process_model_identifier: str, new_location: str) -> flask.wrappers.Response:
|
||||||
original_process_model_id = _un_modify_modified_process_model_id(modified_process_model_identifier)
|
original_process_model_id = _un_modify_modified_process_model_id(modified_process_model_identifier)
|
||||||
new_process_model = ProcessModelService.process_model_move(original_process_model_id, new_location)
|
new_process_model = ProcessModelService.process_model_move(original_process_model_id, new_location)
|
||||||
_commit_and_push_to_git(
|
_commit_and_push_to_git(f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}")
|
||||||
f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
|
|
||||||
)
|
|
||||||
return make_response(jsonify(new_process_model), 200)
|
return make_response(jsonify(new_process_model), 200)
|
||||||
|
|
||||||
|
|
||||||
def process_model_publish(
|
def process_model_publish(modified_process_model_identifier: str, branch_to_update: str | None = None) -> flask.wrappers.Response:
|
||||||
modified_process_model_identifier: str, branch_to_update: str | None = None
|
|
||||||
) -> flask.wrappers.Response:
|
|
||||||
if branch_to_update is None:
|
if branch_to_update is None:
|
||||||
branch_to_update = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"]
|
branch_to_update = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"]
|
||||||
if branch_to_update is None:
|
if branch_to_update is None:
|
||||||
raise MissingGitConfigsError(
|
raise MissingGitConfigsError(
|
||||||
"Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. "
|
"Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. This is required for publishing process models"
|
||||||
"This is required for publishing process models"
|
|
||||||
)
|
)
|
||||||
process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
|
process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
|
||||||
pr_url = GitService().publish(process_model_identifier, branch_to_update)
|
pr_url = GitService().publish(process_model_identifier, branch_to_update)
|
||||||
|
@ -267,9 +260,7 @@ def process_model_file_delete(modified_process_model_identifier: str, file_name:
|
||||||
)
|
)
|
||||||
) from exception
|
) from exception
|
||||||
|
|
||||||
_commit_and_push_to_git(
|
_commit_and_push_to_git(f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}")
|
||||||
f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}"
|
|
||||||
)
|
|
||||||
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
|
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
|
||||||
|
|
||||||
|
|
||||||
|
@ -331,9 +322,7 @@ def process_model_test_run(
|
||||||
# "natural_language_text": "Create a bug tracker process model \
|
# "natural_language_text": "Create a bug tracker process model \
|
||||||
# with a bug-details form that collects summary, description, and priority"
|
# with a bug-details form that collects summary, description, and priority"
|
||||||
# }
|
# }
|
||||||
def process_model_create_with_natural_language(
|
def process_model_create_with_natural_language(modified_process_group_id: str, body: dict[str, str]) -> flask.wrappers.Response:
|
||||||
modified_process_group_id: str, body: dict[str, str]
|
|
||||||
) -> flask.wrappers.Response:
|
|
||||||
pattern = re.compile(
|
pattern = re.compile(
|
||||||
r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that" r" collects (?P<columns>.*)"
|
r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that" r" collects (?P<columns>.*)"
|
||||||
)
|
)
|
||||||
|
@ -391,9 +380,7 @@ def process_model_create_with_natural_language(
|
||||||
with open(bpmn_template_file, encoding="utf-8") as f:
|
with open(bpmn_template_file, encoding="utf-8") as f:
|
||||||
bpmn_template_contents = f.read()
|
bpmn_template_contents = f.read()
|
||||||
|
|
||||||
bpmn_template_contents = bpmn_template_contents.replace(
|
bpmn_template_contents = bpmn_template_contents.replace("natural_language_process_id_template", bpmn_process_identifier)
|
||||||
"natural_language_process_id_template", bpmn_process_identifier
|
|
||||||
)
|
|
||||||
bpmn_template_contents = bpmn_template_contents.replace("form-identifier-id-template", form_identifier)
|
bpmn_template_contents = bpmn_template_contents.replace("form-identifier-id-template", form_identifier)
|
||||||
|
|
||||||
form_uischema_json: dict = {"ui:order": columns}
|
form_uischema_json: dict = {"ui:order": columns}
|
||||||
|
@ -427,9 +414,7 @@ def process_model_create_with_natural_language(
|
||||||
str.encode(json.dumps(form_uischema_json)),
|
str.encode(json.dumps(form_uischema_json)),
|
||||||
)
|
)
|
||||||
|
|
||||||
_commit_and_push_to_git(
|
_commit_and_push_to_git(f"User: {g.user.username} created process model via natural language: {process_model_info.id}")
|
||||||
f"User: {g.user.username} created process model via natural language: {process_model_info.id}"
|
|
||||||
)
|
|
||||||
|
|
||||||
default_report_metadata = ProcessInstanceReportService.system_metadata_map("default")
|
default_report_metadata = ProcessInstanceReportService.system_metadata_map("default")
|
||||||
if default_report_metadata is None:
|
if default_report_metadata is None:
|
||||||
|
|
|
@ -19,9 +19,7 @@ from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTes
|
||||||
from spiffworkflow_backend.services.spec_file_service import SpecFileService
|
from spiffworkflow_backend.services.spec_file_service import SpecFileService
|
||||||
|
|
||||||
|
|
||||||
def script_unit_test_create(
|
def script_unit_test_create(modified_process_model_identifier: str, body: dict[str, str | bool | int]) -> flask.wrappers.Response:
|
||||||
modified_process_model_identifier: str, body: dict[str, str | bool | int]
|
|
||||||
) -> flask.wrappers.Response:
|
|
||||||
bpmn_task_identifier = _get_required_parameter_or_raise("bpmn_task_identifier", body)
|
bpmn_task_identifier = _get_required_parameter_or_raise("bpmn_task_identifier", body)
|
||||||
input_json = _get_required_parameter_or_raise("input_json", body)
|
input_json = _get_required_parameter_or_raise("input_json", body)
|
||||||
expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)
|
expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)
|
||||||
|
@ -92,9 +90,7 @@ def script_unit_test_create(
|
||||||
return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
|
return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
|
||||||
|
|
||||||
|
|
||||||
def script_unit_test_run(
|
def script_unit_test_run(modified_process_model_identifier: str, body: dict[str, str | bool | int]) -> flask.wrappers.Response:
|
||||||
modified_process_model_identifier: str, body: dict[str, str | bool | int]
|
|
||||||
) -> flask.wrappers.Response:
|
|
||||||
# FIXME: We should probably clear this somewhere else but this works
|
# FIXME: We should probably clear this somewhere else but this works
|
||||||
current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
|
current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
|
||||||
|
|
||||||
|
@ -102,7 +98,5 @@ def script_unit_test_run(
|
||||||
input_json = _get_required_parameter_or_raise("input_json", body)
|
input_json = _get_required_parameter_or_raise("input_json", body)
|
||||||
expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)
|
expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)
|
||||||
|
|
||||||
result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
|
result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(python_script, input_json, expected_output_json)
|
||||||
python_script, input_json, expected_output_json
|
|
||||||
)
|
|
||||||
return make_response(jsonify(result), 200)
|
return make_response(jsonify(result), 200)
|
||||||
|
|
|
@ -68,9 +68,7 @@ def authentication_callback(
|
||||||
verify_token(token, force_run=True)
|
verify_token(token, force_run=True)
|
||||||
remote_app = OAuthService.remote_app(service, token)
|
remote_app = OAuthService.remote_app(service, token)
|
||||||
response = remote_app.authorized_response()
|
response = remote_app.authorized_response()
|
||||||
SecretService.update_secret(
|
SecretService.update_secret(f"{service}_{auth_method}", response["access_token"], g.user.id, create_if_not_exists=True)
|
||||||
f"{service}_{auth_method}", response["access_token"], g.user.id, create_if_not_exists=True
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
verify_token(request.args.get("token"), force_run=True)
|
verify_token(request.args.get("token"), force_run=True)
|
||||||
response = request.args["response"]
|
response = request.args["response"]
|
||||||
|
|
|
@@ -26,6 +26,12 @@ from sqlalchemy import func
 from sqlalchemy.orm import aliased
 from sqlalchemy.orm.util import AliasedClass

+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_enabled_for_process_model,
+)
+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_process_instance_if_appropriate,
+)
 from spiffworkflow_backend.data_migrations.process_instance_migrator import ProcessInstanceMigrator
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.error import HumanTaskAlreadyCompletedError
@@ -48,9 +54,11 @@ from spiffworkflow_backend.models.task import Task
 from spiffworkflow_backend.models.task import TaskModel
 from spiffworkflow_backend.models.task_draft_data import TaskDraftDataDict
 from spiffworkflow_backend.models.task_draft_data import TaskDraftDataModel
+from spiffworkflow_backend.models.task_instructions_for_end_user import TaskInstructionsForEndUserModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.process_api_blueprint import _find_principal_or_raise
 from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_by_id_or_raise
+from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_for_me_or_raise
 from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
|
@ -91,9 +99,7 @@ def task_allows_guest(
|
||||||
|
|
||||||
|
|
||||||
# this is currently not used by the Frontend
|
# this is currently not used by the Frontend
|
||||||
def task_list_my_tasks(
|
def task_list_my_tasks(process_instance_id: int | None = None, page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
|
||||||
process_instance_id: int | None = None, page: int = 1, per_page: int = 100
|
|
||||||
) -> flask.wrappers.Response:
|
|
||||||
principal = _find_principal_or_raise()
|
principal = _find_principal_or_raise()
|
||||||
assigned_user = aliased(UserModel)
|
assigned_user = aliased(UserModel)
|
||||||
process_initiator_user = aliased(UserModel)
|
process_initiator_user = aliased(UserModel)
|
||||||
|
@ -263,8 +269,7 @@ def task_data_update(
|
||||||
if process_instance:
|
if process_instance:
|
||||||
if process_instance.status != "suspended":
|
if process_instance.status != "suspended":
|
||||||
raise ProcessInstanceTaskDataCannotBeUpdatedError(
|
raise ProcessInstanceTaskDataCannotBeUpdatedError(
|
||||||
"The process instance needs to be suspended to update the task-data."
|
f"The process instance needs to be suspended to update the task-data. It is currently: {process_instance.status}"
|
||||||
f" It is currently: {process_instance.status}"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
task_model = TaskModel.query.filter_by(guid=task_guid).first()
|
task_model = TaskModel.query.filter_by(guid=task_guid).first()
|
||||||
|
@ -360,9 +365,7 @@ def task_assign(
|
||||||
)
|
)
|
||||||
|
|
||||||
task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
|
task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
|
||||||
human_tasks = HumanTaskModel.query.filter_by(
|
human_tasks = HumanTaskModel.query.filter_by(process_instance_id=process_instance.id, task_id=task_model.guid).all()
|
||||||
process_instance_id=process_instance.id, task_id=task_model.guid
|
|
||||||
).all()
|
|
||||||
|
|
||||||
if len(human_tasks) > 1:
|
if len(human_tasks) > 1:
|
||||||
raise ApiError(
|
raise ApiError(
|
||||||
|
@ -463,15 +466,11 @@ def task_show(
|
||||||
)
|
)
|
||||||
relative_path = os.path.relpath(bpmn_file_full_path, start=FileSystemService.root_path())
|
relative_path = os.path.relpath(bpmn_file_full_path, start=FileSystemService.root_path())
|
||||||
process_model_relative_path = os.path.dirname(relative_path)
|
process_model_relative_path = os.path.dirname(relative_path)
|
||||||
process_model_with_form = ProcessModelService.get_process_model_from_relative_path(
|
process_model_with_form = ProcessModelService.get_process_model_from_relative_path(process_model_relative_path)
|
||||||
process_model_relative_path
|
|
||||||
)
|
|
||||||
|
|
||||||
form_schema_file_name = ""
|
form_schema_file_name = ""
|
||||||
form_ui_schema_file_name = ""
|
form_ui_schema_file_name = ""
|
||||||
task_model.signal_buttons = TaskService.get_ready_signals_with_button_labels(
|
task_model.signal_buttons = TaskService.get_ready_signals_with_button_labels(process_instance_id, task_model.guid)
|
||||||
process_instance_id, task_model.guid
|
|
||||||
)
|
|
||||||
|
|
||||||
if "properties" in extensions:
|
if "properties" in extensions:
|
||||||
properties = extensions["properties"]
|
properties = extensions["properties"]
|
||||||
|
@ -493,10 +492,7 @@ def task_show(
|
||||||
raise (
|
raise (
|
||||||
ApiError(
|
ApiError(
|
||||||
error_code="missing_form_file",
|
error_code="missing_form_file",
|
||||||
message=(
|
message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_guid: {task_guid}",
|
||||||
f"Cannot find a form file for process_instance_id: {process_instance_id}, task_guid:"
|
|
||||||
f" {task_guid}"
|
|
||||||
),
|
|
||||||
status_code=400,
|
status_code=400,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@@ -541,6 +537,50 @@ def task_submit(
     return _task_submit_shared(process_instance_id, task_guid, body)


+def process_instance_progress(
+    process_instance_id: int,
+) -> flask.wrappers.Response:
+    response: dict[str, Task | ProcessInstanceModel | list] = {}
+    process_instance = _find_process_instance_for_me_or_raise(process_instance_id, include_actions=True)
+
+    principal = _find_principal_or_raise()
+    next_human_task_assigned_to_me = _next_human_task_for_user(process_instance_id, principal.user_id)
+    if next_human_task_assigned_to_me:
+        response["task"] = HumanTaskModel.to_task(next_human_task_assigned_to_me)
+    # this may not catch all times we should redirect to instance show page
+    elif not process_instance.is_immediately_runnable():
+        # any time we assign this process_instance, the frontend progress page will redirect to process instance show
+        response["process_instance"] = process_instance
+
+    user_instructions = TaskInstructionsForEndUserModel.retrieve_and_clear(process_instance.id)
+    response["instructions"] = user_instructions
+
+    return make_response(jsonify(response), 200)
+
+
+def task_with_instruction(process_instance_id: int) -> Response:
+    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+    processor = ProcessInstanceProcessor(process_instance)
+    spiff_task = processor.next_task()
+    task = None
+    if spiff_task is not None:
+        task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
+        try:
+            instructions = _render_instructions(spiff_task)
+        except Exception as exception:
+            raise ApiError(
+                error_code="engine_steps_error",
+                message=f"Failed to complete an automated task. Error was: {str(exception)}",
+                status_code=400,
+            ) from exception
+        task.properties = {"instructionsForEndUser": instructions}
+    return make_response(jsonify({"task": task}), 200)
+
+
+def _render_instructions(spiff_task: SpiffTask) -> str:
+    return JinjaService.render_instructions_for_end_user(spiff_task)
+
+
 def _interstitial_stream(
     process_instance: ProcessInstanceModel,
     execute_tasks: bool = True,
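To make the new `process_instance_progress` endpoint above easier to picture, here is an assumed example of the payload the frontend progress page might receive. The top-level keys (`task`, `process_instance`, `instructions`) come from the code above; the nested field names and values are illustrative only, not the exact serializer schema.

```python
# Assumed example response for the progress endpoint; nested fields are illustrative.
{
    "task": {  # present when a human task is already assigned to the caller
        "id": "b9a9...",
        "state": "READY",
        "title": "Review request",
    },
    "instructions": [  # queued end-user instructions, cleared once retrieved
        {"process_instance_id": 42, "instruction": "<p>We are working on your request.</p>"},
    ],
    # "process_instance" is returned instead when the instance is not immediately
    # runnable, signalling the frontend to redirect to the instance show page.
}
```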
@@ -551,12 +591,6 @@ def _interstitial_stream(
         state=TaskState.WAITING | TaskState.STARTED | TaskState.READY | TaskState.ERROR
     )

-    def render_instructions(spiff_task: SpiffTask) -> str:
-        task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
-        if task_model is None:
-            return ""
-        return JinjaService.render_instructions_for_end_user(task_model)
-
     # do not attempt to get task instructions if process instance is suspended or was terminated
     if process_instance.status in ["suspended", "terminated"]:
         yield _render_data("unrunnable_instance", process_instance)
@@ -571,7 +605,7 @@ def _interstitial_stream(
         # ignore the instructions if they are on the EndEvent for the top level process
         if not TaskService.is_main_process_end_event(spiff_task):
             try:
-                instructions = render_instructions(spiff_task)
+                instructions = _render_instructions(spiff_task)
             except Exception as e:
                 api_error = ApiError(
                     error_code="engine_steps_error",
@@ -644,11 +678,10 @@ def _interstitial_stream(
         tasks = get_reportable_tasks(processor)

         spiff_task = processor.next_task()
-        if spiff_task is not None:
+        if spiff_task is not None and spiff_task.id not in reported_ids:
             task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
-            if task.id not in reported_ids:
-                try:
-                    instructions = render_instructions(spiff_task)
+            try:
+                instructions = _render_instructions(spiff_task)
             except Exception as e:
                 api_error = ApiError(
                     error_code="engine_steps_error",
@@ -876,30 +909,28 @@ def _task_submit_shared(
         db.session.delete(task_draft_data)
         db.session.commit()

-    next_human_task_assigned_to_me = (
-        HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False)
-        .order_by(asc(HumanTaskModel.id))  # type: ignore
-        .join(HumanTaskUserModel)
-        .filter_by(user_id=principal.user_id)
-        .first()
-    )
+    next_human_task_assigned_to_me = _next_human_task_for_user(process_instance_id, principal.user_id)
     if next_human_task_assigned_to_me:
         return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200)

+    queue_process_instance_if_appropriate(process_instance)
+
+    # a guest user completed a task, it has a guest_confirmation message to display to them,
+    # and there is nothing else for them to do
     spiff_task_extensions = spiff_task.task_spec.extensions
     if (
         "allowGuest" in spiff_task_extensions
         and spiff_task_extensions["allowGuest"] == "true"
         and "guestConfirmation" in spiff_task.task_spec.extensions
     ):
-        return make_response(
-            jsonify({"guest_confirmation": spiff_task.task_spec.extensions["guestConfirmation"]}), 200
-        )
+        return make_response(jsonify({"guest_confirmation": spiff_task.task_spec.extensions["guestConfirmation"]}), 200)

     if processor.next_task():
         task = ProcessInstanceService.spiff_task_to_api_task(processor, processor.next_task())
+        task.process_model_uses_queued_execution = queue_enabled_for_process_model(process_instance)
         return make_response(jsonify(task), 200)

+    # next_task always returns something, even if the instance is complete, so we never get here
     return Response(
         json.dumps(
             {
|
@ -961,9 +992,7 @@ def _get_tasks(
|
||||||
if user_group_identifier:
|
if user_group_identifier:
|
||||||
human_tasks_query = human_tasks_query.filter(GroupModel.identifier == user_group_identifier)
|
human_tasks_query = human_tasks_query.filter(GroupModel.identifier == user_group_identifier)
|
||||||
else:
|
else:
|
||||||
human_tasks_query = human_tasks_query.filter(
|
human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_not(None)) # type: ignore
|
||||||
HumanTaskModel.lane_assignment_id.is_not(None) # type: ignore
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore
|
human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore
|
||||||
|
|
||||||
|
@ -1147,15 +1176,15 @@ def _update_form_schema_with_task_data_as_needed(in_dict: dict, task_data: dict)
|
||||||
|
|
||||||
|
|
||||||
def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
|
def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
|
||||||
potential_owner_usernames_from_group_concat_or_similar = func.group_concat(
|
potential_owner_usernames_from_group_concat_or_similar = func.group_concat(assigned_user.username.distinct()).label(
|
||||||
assigned_user.username.distinct()
|
"potential_owner_usernames"
|
||||||
).label("potential_owner_usernames")
|
)
|
||||||
db_type = current_app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE")
|
db_type = current_app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE")
|
||||||
|
|
||||||
if db_type == "postgres":
|
if db_type == "postgres":
|
||||||
potential_owner_usernames_from_group_concat_or_similar = func.string_agg(
|
potential_owner_usernames_from_group_concat_or_similar = func.string_agg(assigned_user.username.distinct(), ", ").label(
|
||||||
assigned_user.username.distinct(), ", "
|
"potential_owner_usernames"
|
||||||
).label("potential_owner_usernames")
|
)
|
||||||
|
|
||||||
return potential_owner_usernames_from_group_concat_or_similar
|
return potential_owner_usernames_from_group_concat_or_similar
|
||||||
|
|
||||||
|
@ -1179,10 +1208,7 @@ def _find_human_task_or_raise(
|
||||||
raise (
|
raise (
|
||||||
ApiError(
|
ApiError(
|
||||||
error_code="no_human_task",
|
error_code="no_human_task",
|
||||||
message=(
|
message=f"Cannot find a task to complete for task id '{task_guid}' and process instance {process_instance_id}.",
|
||||||
f"Cannot find a task to complete for task id '{task_guid}' and"
|
|
||||||
f" process instance {process_instance_id}."
|
|
||||||
),
|
|
||||||
status_code=500,
|
status_code=500,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -1206,9 +1232,7 @@ def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(form_ui_schema: di
|
||||||
|
|
||||||
|
|
||||||
def _get_task_model_from_guid_or_raise(task_guid: str, process_instance_id: int) -> TaskModel:
|
def _get_task_model_from_guid_or_raise(task_guid: str, process_instance_id: int) -> TaskModel:
|
||||||
task_model: TaskModel | None = TaskModel.query.filter_by(
|
task_model: TaskModel | None = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first()
|
||||||
guid=task_guid, process_instance_id=process_instance_id
|
|
||||||
).first()
|
|
||||||
if task_model is None:
|
if task_model is None:
|
||||||
raise ApiError(
|
raise ApiError(
|
||||||
error_code="task_not_found",
|
error_code="task_not_found",
|
||||||
|
@@ -1216,3 +1240,14 @@ def _get_task_model_from_guid_or_raise(task_guid: str, process_instance_id: int)
         status_code=400,
     )
     return task_model
+
+
+def _next_human_task_for_user(process_instance_id: int, user_id: int) -> HumanTaskModel | None:
+    next_human_task: HumanTaskModel | None = (
+        HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False)
+        .order_by(asc(HumanTaskModel.id))  # type: ignore
+        .join(HumanTaskUserModel)
+        .filter_by(user_id=user_id)
+        .first()
+    )
+    return next_human_task
|
|
|
@ -24,9 +24,7 @@ class GetGroupMembers(Script):
|
||||||
group_identifier = args[0]
|
group_identifier = args[0]
|
||||||
group = GroupModel.query.filter_by(identifier=group_identifier).first()
|
group = GroupModel.query.filter_by(identifier=group_identifier).first()
|
||||||
if group is None:
|
if group is None:
|
||||||
raise GroupNotFoundError(
|
raise GroupNotFoundError(f"Script 'get_group_members' could not find group with identifier '{group_identifier}'.")
|
||||||
f"Script 'get_group_members' could not find group with identifier '{group_identifier}'."
|
|
||||||
)
|
|
||||||
|
|
||||||
usernames = [u.username for u in group.users]
|
usernames = [u.username for u in group.users]
|
||||||
return usernames
|
return usernames
|
||||||
|
|
|
@ -30,8 +30,7 @@ class GetUrlForTaskWithBpmnIdentifier(Script):
|
||||||
desired_spiff_task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(bpmn_identifier, spiff_task.workflow)
|
desired_spiff_task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(bpmn_identifier, spiff_task.workflow)
|
||||||
if desired_spiff_task is None:
|
if desired_spiff_task is None:
|
||||||
raise Exception(
|
raise Exception(
|
||||||
f"Could not find a task with bpmn identifier '{bpmn_identifier}' in"
|
f"Could not find a task with bpmn identifier '{bpmn_identifier}' in get_url_for_task_with_bpmn_identifier"
|
||||||
" get_url_for_task_with_bpmn_identifier"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if not desired_spiff_task.task_spec.manual:
|
if not desired_spiff_task.task_spec.manual:
|
||||||
|
|
|
@ -30,17 +30,12 @@ class GetMarkdownFileDownloadLink(Script):
|
||||||
process_model_identifier = script_attributes_context.process_model_identifier
|
process_model_identifier = script_attributes_context.process_model_identifier
|
||||||
if process_model_identifier is None:
|
if process_model_identifier is None:
|
||||||
raise self.get_proces_model_identifier_is_missing_error("markdown_file_download_link")
|
raise self.get_proces_model_identifier_is_missing_error("markdown_file_download_link")
|
||||||
modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(
|
modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(process_model_identifier)
|
||||||
process_model_identifier
|
|
||||||
)
|
|
||||||
process_instance_id = script_attributes_context.process_instance_id
|
process_instance_id = script_attributes_context.process_instance_id
|
||||||
if process_instance_id is None:
|
if process_instance_id is None:
|
||||||
raise self.get_proces_instance_id_is_missing_error("save_process_instance_metadata")
|
raise self.get_proces_instance_id_is_missing_error("save_process_instance_metadata")
|
||||||
url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
|
url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
|
||||||
url += (
|
url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + f"{process_instance_id}/{digest}"
|
||||||
f"/v1.0/process-data-file-download/{modified_process_model_identifier}/"
|
|
||||||
+ f"{process_instance_id}/{digest}"
|
|
||||||
)
|
|
||||||
link = f"[{label}]({url})"
|
link = f"[{label}]({url})"
|
||||||
|
|
||||||
return link
|
return link
|
||||||
|
|
|
@ -123,13 +123,10 @@ class Script:
|
||||||
f" running script '{script_function_name}'"
|
f" running script '{script_function_name}'"
|
||||||
)
|
)
|
||||||
user = process_instance.process_initiator
|
user = process_instance.process_initiator
|
||||||
has_permission = AuthorizationService.user_has_permission(
|
has_permission = AuthorizationService.user_has_permission(user=user, permission="create", target_uri=uri)
|
||||||
user=user, permission="create", target_uri=uri
|
|
||||||
)
|
|
||||||
if not has_permission:
|
if not has_permission:
|
||||||
raise ScriptUnauthorizedForUserError(
|
raise ScriptUnauthorizedForUserError(
|
||||||
f"User {user.username} does not have access to run"
|
f"User {user.username} does not have access to run privileged script '{script_function_name}'"
|
||||||
f" privileged script '{script_function_name}'"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def run_script_if_allowed(*ar: Any, **kw: Any) -> Any:
|
def run_script_if_allowed(*ar: Any, **kw: Any) -> Any:
|
||||||
|
|
|
@ -20,9 +20,7 @@ class SetUserProperties(Script):
|
||||||
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
|
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
|
||||||
properties = args[0]
|
properties = args[0]
|
||||||
if not isinstance(properties, dict):
|
if not isinstance(properties, dict):
|
||||||
raise InvalidArgsGivenToScriptError(
|
raise InvalidArgsGivenToScriptError(f"Args to set_user_properties must be a dict. '{properties}' is invalid.")
|
||||||
f"Args to set_user_properties must be a dict. '{properties}' is invalid."
|
|
||||||
)
|
|
||||||
# consider using engine-specific insert or update metaphor in future: https://stackoverflow.com/a/68431412/6090676
|
# consider using engine-specific insert or update metaphor in future: https://stackoverflow.com/a/68431412/6090676
|
||||||
for property_key, property_value in properties.items():
|
for property_key, property_value in properties.items():
|
||||||
user_property = UserPropertyModel.query.filter_by(user_id=g.user.id, key=property_key).first()
|
user_property = UserPropertyModel.query.filter_by(user_id=g.user.id, key=property_key).first()
|
||||||
|
|
|
@ -48,9 +48,7 @@ class AuthenticationOptionNotFoundError(Exception):
|
||||||
|
|
||||||
|
|
||||||
class AuthenticationService:
|
class AuthenticationService:
|
||||||
ENDPOINT_CACHE: dict[str, dict[str, str]] = (
|
ENDPOINT_CACHE: dict[str, dict[str, str]] = {} # We only need to find the openid endpoints once, then we can cache them.
|
||||||
{}
|
|
||||||
) # We only need to find the openid endpoints once, then we can cache them.
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def authentication_options_for_api(cls) -> list[AuthenticationOptionForApi]:
|
def authentication_options_for_api(cls) -> list[AuthenticationOptionForApi]:
|
||||||
|
@ -72,9 +70,7 @@ class AuthenticationService:
|
||||||
if config["identifier"] == authentication_identifier:
|
if config["identifier"] == authentication_identifier:
|
||||||
return_config: AuthenticationOption = config
|
return_config: AuthenticationOption = config
|
||||||
return return_config
|
return return_config
|
||||||
raise AuthenticationOptionNotFoundError(
|
raise AuthenticationOptionNotFoundError(f"Could not find a config with identifier '{authentication_identifier}'")
|
||||||
f"Could not find a config with identifier '{authentication_identifier}'"
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def client_id(cls, authentication_identifier: str) -> str:
|
def client_id(cls, authentication_identifier: str) -> str:
|
||||||
|
@ -119,9 +115,7 @@ class AuthenticationService:
|
||||||
if redirect_url is None:
|
if redirect_url is None:
|
||||||
redirect_url = f"{self.get_backend_url()}/v1.0/logout_return"
|
redirect_url = f"{self.get_backend_url()}/v1.0/logout_return"
|
||||||
request_url = (
|
request_url = (
|
||||||
self.__class__.open_id_endpoint_for_name(
|
self.__class__.open_id_endpoint_for_name("end_session_endpoint", authentication_identifier=authentication_identifier)
|
||||||
"end_session_endpoint", authentication_identifier=authentication_identifier
|
|
||||||
)
|
|
||||||
+ f"?post_logout_redirect_uri={redirect_url}&"
|
+ f"?post_logout_redirect_uri={redirect_url}&"
|
||||||
+ f"id_token_hint={id_token}"
|
+ f"id_token_hint={id_token}"
|
||||||
)
|
)
|
||||||
|
@ -135,14 +129,10 @@ class AuthenticationService:
|
||||||
)
|
)
|
||||||
return state
|
return state
|
||||||
|
|
||||||
def get_login_redirect_url(
|
def get_login_redirect_url(self, state: str, authentication_identifier: str, redirect_url: str = "/v1.0/login_return") -> str:
|
||||||
self, state: str, authentication_identifier: str, redirect_url: str = "/v1.0/login_return"
|
|
||||||
) -> str:
|
|
||||||
return_redirect_url = f"{self.get_backend_url()}{redirect_url}"
|
return_redirect_url = f"{self.get_backend_url()}{redirect_url}"
|
||||||
login_redirect_url = (
|
login_redirect_url = (
|
||||||
self.__class__.open_id_endpoint_for_name(
|
self.open_id_endpoint_for_name("authorization_endpoint", authentication_identifier=authentication_identifier)
|
||||||
"authorization_endpoint", authentication_identifier=authentication_identifier
|
|
||||||
)
|
|
||||||
+ f"?state={state}&"
|
+ f"?state={state}&"
|
||||||
+ "response_type=code&"
|
+ "response_type=code&"
|
||||||
+ f"client_id={self.client_id(authentication_identifier)}&"
|
+ f"client_id={self.client_id(authentication_identifier)}&"
|
||||||
|
@@ -151,9 +141,7 @@ class AuthenticationService:
         )
         return login_redirect_url

-    def get_auth_token_object(
-        self, code: str, authentication_identifier: str, redirect_url: str = "/v1.0/login_return"
-    ) -> dict:
+    def get_auth_token_object(self, code: str, authentication_identifier: str, redirect_url: str = "/v1.0/login_return") -> dict:
         backend_basic_auth_string = (
             f"{self.client_id(authentication_identifier)}:{self.__class__.secret_key(authentication_identifier)}"
         )
@@ -169,9 +157,7 @@ class AuthenticationService:
             "redirect_uri": f"{self.get_backend_url()}{redirect_url}",
         }

-        request_url = self.open_id_endpoint_for_name(
-            "token_endpoint", authentication_identifier=authentication_identifier
-        )
+        request_url = self.open_id_endpoint_for_name("token_endpoint", authentication_identifier=authentication_identifier)

         response = requests.post(request_url, data=data, headers=headers, timeout=HTTP_REQUEST_TIMEOUT_SECONDS)
         auth_token_object: dict = json.loads(response.text)
@@ -203,15 +189,13 @@ class AuthenticationService:

        if iss != cls.server_url(authentication_identifier):
            current_app.logger.error(
-                f"TOKEN INVALID because ISS '{iss}' does not match server url"
-                f" '{cls.server_url(authentication_identifier)}'"
+                f"TOKEN INVALID because ISS '{iss}' does not match server url '{cls.server_url(authentication_identifier)}'"
            )
            valid = False
        # aud could be an array or a string
        elif len(overlapping_aud_values) < 1:
            current_app.logger.error(
-                f"TOKEN INVALID because audience '{aud}' does not match client id"
-                f" '{cls.client_id(authentication_identifier)}'"
+                f"TOKEN INVALID because audience '{aud}' does not match client id '{cls.client_id(authentication_identifier)}'"
            )
            valid = False
        elif azp and azp not in (
@@ -219,15 +203,12 @@ class AuthenticationService:
            "account",
        ):
            current_app.logger.error(
-                f"TOKEN INVALID because azp '{azp}' does not match client id"
-                f" '{cls.client_id(authentication_identifier)}'"
+                f"TOKEN INVALID because azp '{azp}' does not match client id '{cls.client_id(authentication_identifier)}'"
            )
            valid = False
        # make sure issued at time is not in the future
        elif now + iat_clock_skew_leeway < iat:
-            current_app.logger.error(
-                f"TOKEN INVALID because iat '{iat}' is in the future relative to server now '{now}'"
-            )
+            current_app.logger.error(f"TOKEN INVALID because iat '{iat}' is in the future relative to server now '{now}'")
            valid = False

        if valid and now > decoded_token["exp"]:
@@ -264,9 +245,7 @@ class AuthenticationService:

     @staticmethod
     def get_refresh_token(user_id: int) -> str | None:
-        refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter(
-            RefreshTokenModel.user_id == user_id
-        ).first()
+        refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter(RefreshTokenModel.user_id == user_id).first()
         if refresh_token_object:
             return refresh_token_object.token
         return None
@@ -274,9 +253,7 @@ class AuthenticationService:
     @classmethod
     def get_auth_token_from_refresh_token(cls, refresh_token: str, authentication_identifier: str) -> dict:
         """Converts a refresh token to an Auth Token by calling the openid's auth endpoint."""
-        backend_basic_auth_string = (
-            f"{cls.client_id(authentication_identifier)}:{cls.secret_key(authentication_identifier)}"
-        )
+        backend_basic_auth_string = f"{cls.client_id(authentication_identifier)}:{cls.secret_key(authentication_identifier)}"
         backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
         backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
         headers = {
@@ -291,9 +268,7 @@ class AuthenticationService:
             "client_secret": cls.secret_key(authentication_identifier),
         }

-        request_url = cls.open_id_endpoint_for_name(
-            "token_endpoint", authentication_identifier=authentication_identifier
-        )
+        request_url = cls.open_id_endpoint_for_name("token_endpoint", authentication_identifier=authentication_identifier)

         response = requests.post(request_url, data=data, headers=headers, timeout=HTTP_REQUEST_TIMEOUT_SECONDS)
         auth_token_object: dict = json.loads(response.text)

@@ -188,9 +188,7 @@ class AuthorizationService:
     def find_or_create_permission_target(cls, uri: str) -> PermissionTargetModel:
         uri_with_percent = re.sub(r"\*", "%", uri)
         target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
-        permission_target: PermissionTargetModel | None = PermissionTargetModel.query.filter_by(
-            uri=target_uri_normalized
-        ).first()
+        permission_target: PermissionTargetModel | None = PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
         if permission_target is None:
             permission_target = PermissionTargetModel(uri=target_uri_normalized)
             db.session.add(permission_target)
@@ -305,10 +303,7 @@ class AuthorizationService:
            return None

        raise NotAuthorizedError(
-            (
-                f"User {g.user.username} is not authorized to perform requested action:"
-                f" {permission_string} - {request.path}"
-            ),
+            f"User {g.user.username} is not authorized to perform requested action: {permission_string} - {request.path}",
        )

    @classmethod
@@ -349,8 +344,7 @@ class AuthorizationService:

        if human_task.completed:
            raise HumanTaskAlreadyCompletedError(
-                f"Human task with task guid '{task_guid}' for process instance '{process_instance_id}' has already"
-                " been completed"
+                f"Human task with task guid '{task_guid}' for process instance '{process_instance_id}' has already been completed"
            )

        if user not in human_task.potential_owners:
@@ -426,16 +420,13 @@ class AuthorizationService:
        if desired_group_identifiers is not None:
            if not isinstance(desired_group_identifiers, list):
                current_app.logger.error(
-                    f"Invalid groups property in token: {desired_group_identifiers}."
-                    "If groups is specified, it must be a list"
+                    f"Invalid groups property in token: {desired_group_identifiers}.If groups is specified, it must be a list"
                )
            else:
                for desired_group_identifier in desired_group_identifiers:
                    UserService.add_user_to_group_by_group_identifier(user_model, desired_group_identifier)
            current_group_identifiers = [g.identifier for g in user_model.groups]
-            groups_to_remove_from_user = [
-                item for item in current_group_identifiers if item not in desired_group_identifiers
-            ]
+            groups_to_remove_from_user = [item for item in current_group_identifiers if item not in desired_group_identifiers]
            for gtrfu in groups_to_remove_from_user:
                if gtrfu != current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]:
                    UserService.remove_user_from_group(user_model, gtrfu)
@@ -524,17 +515,11 @@ class AuthorizationService:
        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/users/search"))
        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/onboarding"))

-        permissions_to_assign.append(
-            PermissionToAssign(permission="read", target_uri="/process-instances/report-metadata")
-        )
-        permissions_to_assign.append(
-            PermissionToAssign(permission="read", target_uri="/process-instances/find-by-id/*")
-        )
+        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-instances/report-metadata"))
+        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-instances/find-by-id/*"))

        for permission in ["create", "read", "update", "delete"]:
-            permissions_to_assign.append(
-                PermissionToAssign(permission=permission, target_uri="/process-instances/reports/*")
-            )
+            permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/process-instances/reports/*"))
            permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/tasks/*"))
        return permissions_to_assign

@@ -551,9 +536,7 @@ class AuthorizationService:
        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/authentications"))
        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/authentication/configuration"))
        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/authentication_begin/*"))
-        permissions_to_assign.append(
-            PermissionToAssign(permission="update", target_uri="/authentication/configuration")
-        )
+        permissions_to_assign.append(PermissionToAssign(permission="update", target_uri="/authentication/configuration"))

        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/service-accounts"))

@@ -573,9 +556,7 @@ class AuthorizationService:
        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/messages/*"))
        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/messages"))

-        permissions_to_assign.append(
-            PermissionToAssign(permission="create", target_uri="/can-run-privileged-script/*")
-        )
+        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/can-run-privileged-script/*"))
        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/debug/*"))
        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/send-event/*"))
        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/task-complete/*"))
@@ -731,8 +712,7 @@ class AuthorizationService:
        if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_ABSOLUTE_PATH"] is None:
            raise (
                PermissionsFileNotSetError(
-                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_ABSOLUTE_PATH needs to be set in order to import"
-                    " permissions"
+                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_ABSOLUTE_PATH needs to be set in order to import permissions"
                )
            )

@@ -761,9 +741,7 @@ class AuthorizationService:
            uri = permission_config["uri"]
            actions = cls.get_permissions_from_config(permission_config)
            for group_identifier in permission_config["groups"]:
-                group_permissions_by_group[group_identifier]["permissions"].append(
-                    {"actions": actions, "uri": uri}
-                )
+                group_permissions_by_group[group_identifier]["permissions"].append({"actions": actions, "uri": uri})

        return list(group_permissions_by_group.values())

@@ -881,9 +859,7 @@ class AuthorizationService:
        db.session.commit()

    @classmethod
-    def refresh_permissions(
-        cls, group_permissions: list[GroupPermissionsDict], group_permissions_only: bool = False
-    ) -> None:
+    def refresh_permissions(cls, group_permissions: list[GroupPermissionsDict], group_permissions_only: bool = False) -> None:
        """Adds new permission assignments and deletes old ones."""
        initial_permission_assignments = (
            PermissionAssignmentModel.query.outerjoin(

@@ -43,9 +43,7 @@ class ElementUnitsService:
         return None

     @classmethod
-    def workflow_from_cached_element_unit(
-        cls, cache_key: str, process_id: str, element_id: str
-    ) -> BpmnSpecDict | None:
+    def workflow_from_cached_element_unit(cls, cache_key: str, process_id: str, element_id: str) -> BpmnSpecDict | None:
         if not cls._enabled():
             return None

@@ -62,9 +60,7 @@ class ElementUnitsService:

            current_app.logger.debug(f"Checking element unit cache @ {cache_key} :: '{process_id}' - '{element_id}'")

-            bpmn_spec_json = spiff_element_units.workflow_from_cached_element_unit(
-                cache_dir, cache_key, process_id, element_id
-            )
+            bpmn_spec_json = spiff_element_units.workflow_from_cached_element_unit(cache_dir, cache_key, process_id, element_id)
            return json.loads(bpmn_spec_json)  # type: ignore
        except Exception as e:
            current_app.logger.exception(e)

@@ -34,9 +34,7 @@ class FileSystemService:
     PROCESS_MODEL_JSON_FILE = "process_model.json"

     @classmethod
-    def walk_files(
-        cls, start_dir: str, directory_predicate: DirectoryPredicate, file_predicate: FilePredicate
-    ) -> FileGenerator:
+    def walk_files(cls, start_dir: str, directory_predicate: DirectoryPredicate, file_predicate: FilePredicate) -> FileGenerator:
         depth = 0
         for root, subdirs, files in os.walk(start_dir):
             if directory_predicate:
@@ -120,9 +118,7 @@ class FileSystemService:
     def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
         full_file_path = FileSystemService.full_file_path(process_model_info, file_name)
         if not os.path.exists(full_file_path):
-            raise ProcessModelFileNotFoundError(
-                f"No file found with name {file_name} in {process_model_info.display_name}"
-            )
+            raise ProcessModelFileNotFoundError(f"No file found with name {file_name} in {process_model_info.display_name}")
         with open(full_file_path, "rb") as f_handle:
             spec_file_data = f_handle.read()
         return spec_file_data

@@ -172,9 +172,7 @@ class GitService:
         my_env = os.environ.copy()
         my_env["GIT_COMMITTER_NAME"] = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown"

-        my_env["GIT_COMMITTER_EMAIL"] = (
-            current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL") or "unknown@example.org"
-        )
+        my_env["GIT_COMMITTER_EMAIL"] = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL") or "unknown@example.org"

         # SSH authentication can be also provided via gitconfig.
         ssh_key_path = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH")
@@ -206,9 +204,7 @@ class GitService:
     @classmethod
     def handle_web_hook(cls, webhook: dict) -> bool:
         if "repository" not in webhook or "clone_url" not in webhook["repository"]:
-            raise InvalidGitWebhookBodyError(
-                f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}"
-            )
+            raise InvalidGitWebhookBodyError(f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}")
         repo = webhook["repository"]
         valid_clone_urls = [repo["clone_url"], repo["git_url"], repo["ssh_url"]]
         bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
@@ -217,8 +213,7 @@ class GitService:
         )
         if config_clone_url not in valid_clone_urls:
             raise GitCloneUrlMismatchError(
-                f"Configured clone url does not match the repo URLs from webhook: {config_clone_url} =/="
-                f" {valid_clone_urls}"
+                f"Configured clone url does not match the repo URLs from webhook: {config_clone_url} =/= {valid_clone_urls}"
             )

         # Test webhook requests have a zen koan and hook info.
@@ -282,9 +277,7 @@ class GitService:
         if cls.run_shell_command_as_boolean(command, context_directory=destination_process_root):
             cls.run_shell_command(["checkout", branch_to_pull_request], context_directory=destination_process_root)
         else:
-            cls.run_shell_command(
-                ["checkout", "-b", branch_to_pull_request], context_directory=destination_process_root
-            )
+            cls.run_shell_command(["checkout", "-b", branch_to_pull_request], context_directory=destination_process_root)

         # copy files from process model into the new publish branch
         destination_process_model_path = os.path.join(destination_process_root, process_model_id)
@@ -294,8 +287,7 @@ class GitService:

         # add and commit files to branch_to_pull_request, then push
         commit_message = (
-            f"Request to publish changes to {process_model_id}, "
-            f"from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}"
+            f"Request to publish changes to {process_model_id}, from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}"
         )
         cls.commit(commit_message, destination_process_root, branch_to_pull_request)

@@ -43,7 +43,7 @@ class JinjaService:
         if extensions is None:
             if isinstance(task, TaskModel):
                 extensions = TaskService.get_extensions_from_task_model(task)
-            else:
+            elif hasattr(task.task_spec, "extensions"):
                 extensions = task.task_spec.extensions
         if extensions and "instructionsForEndUser" in extensions:
             if extensions["instructionsForEndUser"]:

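The switch from a bare else to an elif guard means task specs without an extensions attribute now leave extensions as None instead of raising AttributeError. A tiny standalone illustration of the guarded lookup; the stand-in classes below are invented for the example and are not part of this diff:

class _SpecWithoutExtensions:
    pass

class _Task:
    task_spec = _SpecWithoutExtensions()

task = _Task()
# old behavior: task.task_spec.extensions would raise AttributeError here
extensions = task.task_spec.extensions if hasattr(task.task_spec, "extensions") else None
print(extensions)  # None
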
@@ -49,9 +49,7 @@ class JsonFormatter(logging.Formatter):
         return "asctime" in self.fmt_dict.values()

     # we are overriding a method that returns a string and returning a dict, hence the Any
-    def formatMessage(  # noqa: N802, this is overriding a method from python's stdlib
-        self, record: logging.LogRecord
-    ) -> Any:
+    def formatMessage(self, record: logging.LogRecord) -> Any:  # noqa: N802, this is overriding a method from python's stdlib
         """Overwritten to return a dictionary of the relevant LogRecord attributes instead of a string.

         KeyError is raised if an unknown attribute is provided in the fmt_dict.
@@ -90,9 +88,7 @@ def setup_logger(app: Flask) -> None:
     log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]

     if upper_log_level_string not in log_levels:
-        raise InvalidLogLevelError(
-            f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}"
-        )
+        raise InvalidLogLevelError(f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}")

     log_level = getattr(logging, upper_log_level_string)
     spiff_log_level = getattr(logging, upper_log_level_string)
@@ -119,9 +115,7 @@ def setup_logger(app: Flask) -> None:

     spiff_logger_filehandler = None
     if app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"]:
-        spiff_logger_filehandler = logging.FileHandler(
-            f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log"
-        )
+        spiff_logger_filehandler = logging.FileHandler(f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log")
         spiff_logger_filehandler.setLevel(spiff_log_level)
         spiff_logger_filehandler.setFormatter(log_formatter)

@@ -0,0 +1,108 @@
+import json
+import os
+import sys
+from typing import Any
+
+import connexion  # type: ignore
+import flask.wrappers
+import sentry_sdk
+from prometheus_flask_exporter import ConnexionPrometheusMetrics  # type: ignore
+from sentry_sdk.integrations.flask import FlaskIntegration
+from werkzeug.exceptions import NotFound
+
+
+def get_version_info_data() -> dict[str, Any]:
+    version_info_data_dict = {}
+    if os.path.isfile("version_info.json"):
+        with open("version_info.json") as f:
+            version_info_data_dict = json.load(f)
+    return version_info_data_dict
+
+
+def setup_prometheus_metrics(app: flask.app.Flask, connexion_app: connexion.apps.flask_app.FlaskApp) -> None:
+    metrics = ConnexionPrometheusMetrics(connexion_app)
+    app.config["PROMETHEUS_METRICS"] = metrics
+    version_info_data = get_version_info_data()
+    if len(version_info_data) > 0:
+        # prometheus does not allow periods in key names
+        version_info_data_normalized = {k.replace(".", "_"): v for k, v in version_info_data.items()}
+        metrics.info("version_info", "Application Version Info", **version_info_data_normalized)
+
+
+def traces_sampler(sampling_context: Any) -> Any:
+    # always inherit
+    if sampling_context["parent_sampled"] is not None:
+        return sampling_context["parent_sampled"]
+
+    if "wsgi_environ" in sampling_context:
+        wsgi_environ = sampling_context["wsgi_environ"]
+        path_info = wsgi_environ.get("PATH_INFO")
+        request_method = wsgi_environ.get("REQUEST_METHOD")
+
+        # tasks_controller.task_submit
+        # this is the current pain point as of 31 jan 2023.
+        if path_info and (
+            (path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
+            or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
+        ):
+            return 1
+
+    # Default sample rate for all others (replaces traces_sample_rate)
+    return 0.01
+
+
+def configure_sentry(app: flask.app.Flask) -> None:
+    # get rid of NotFound errors
+    def before_send(event: Any, hint: Any) -> Any:
+        if "exc_info" in hint:
+            _exc_type, exc_value, _tb = hint["exc_info"]
+            # NotFound is mostly from web crawlers
+            if isinstance(exc_value, NotFound):
+                return None
+        return event
+
+    sentry_errors_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE")
+    if sentry_errors_sample_rate is None:
+        raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow")
+
+    sentry_traces_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE")
+    if sentry_traces_sample_rate is None:
+        raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow")
+
+    sentry_env_identifier = app.config["ENV_IDENTIFIER"]
+    if app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER"):
+        sentry_env_identifier = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER")
+
+    sentry_configs = {
+        "dsn": app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"),
+        "integrations": [
+            FlaskIntegration(),
+        ],
+        "environment": sentry_env_identifier,
+        # sample_rate is the errors sample rate. we usually set it to 1 (100%)
+        # so we get all errors in sentry.
+        "sample_rate": float(sentry_errors_sample_rate),
+        # Set traces_sample_rate to capture a certain percentage
+        # of transactions for performance monitoring.
+        # We recommend adjusting this value to less than 1(00%) in production.
+        "traces_sample_rate": float(sentry_traces_sample_rate),
+        "traces_sampler": traces_sampler,
+        # The profiles_sample_rate setting is relative to the traces_sample_rate setting.
+        "before_send": before_send,
+    }
+
+    # https://docs.sentry.io/platforms/python/configuration/releases
+    version_info_data = get_version_info_data()
+    if len(version_info_data) > 0:
+        git_commit = version_info_data.get("org.opencontainers.image.revision") or version_info_data.get("git_commit")
+        if git_commit is not None:
+            sentry_configs["release"] = git_commit
+
+    if app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED"):
+        # profiling doesn't work on windows, because of an issue like https://github.com/nvdv/vprof/issues/62
+        # but also we commented out profiling because it was causing segfaults (i guess it is marked experimental)
+        profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
+        if profiles_sample_rate > 0:
+            sentry_configs["_experiments"] = {"profiles_sample_rate": profiles_sample_rate}
+
+    sentry_sdk.init(**sentry_configs)

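These helpers are presumably called once while the application is created. A minimal sketch of that wiring, assuming a create_app-style factory (the factory shown here is an assumption for illustration, not part of this diff):

import connexion

def create_app() -> connexion.FlaskApp:
    connexion_app = connexion.FlaskApp(__name__)
    app = connexion_app.app  # the underlying flask.Flask instance
    setup_prometheus_metrics(app, connexion_app)  # exposes /metrics with version_info labels
    if app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"):
        configure_sentry(app)  # errors sampled at sample_rate, traces decided by traces_sampler
    return connexion_app
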
@@ -25,15 +25,11 @@ class ProcessCallerService:
     @staticmethod
     def add_caller(process_id: str, called_process_ids: list[str]) -> None:
         for called_process_id in called_process_ids:
-            db.session.add(
-                ProcessCallerCacheModel(process_identifier=called_process_id, calling_process_identifier=process_id)
-            )
+            db.session.add(ProcessCallerCacheModel(process_identifier=called_process_id, calling_process_identifier=process_id))

     @staticmethod
     def callers(process_ids: list[str]) -> list[str]:
         records = (
-            db.session.query(ProcessCallerCacheModel)
-            .filter(ProcessCallerCacheModel.process_identifier.in_(process_ids))
-            .all()
+            db.session.query(ProcessCallerCacheModel).filter(ProcessCallerCacheModel.process_identifier.in_(process_ids)).all()
         )
         return sorted({r.calling_process_identifier for r in records})

@@ -18,8 +18,11 @@ class ProcessInstanceLockService:
     """TODO: comment."""

     @classmethod
-    def set_thread_local_locking_context(cls, domain: str) -> None:
-        current_app.config["THREAD_LOCAL_DATA"].lock_service_context = {
+    def set_thread_local_locking_context(cls, domain: str, additional_processing_identifier: str | None = None) -> None:
+        tld = current_app.config["THREAD_LOCAL_DATA"]
+        if not hasattr(tld, "lock_service_context"):
+            tld.lock_service_context = {}
+        tld.lock_service_context[additional_processing_identifier] = {
             "domain": domain,
             "uuid": current_app.config["PROCESS_UUID"],
             "thread_id": threading.get_ident(),
@@ -27,45 +30,52 @@ class ProcessInstanceLockService:
         }

     @classmethod
-    def get_thread_local_locking_context(cls) -> dict[str, Any]:
+    def get_thread_local_locking_context(cls, additional_processing_identifier: str | None = None) -> dict[str, Any]:
         tld = current_app.config["THREAD_LOCAL_DATA"]
         if not hasattr(tld, "lock_service_context"):
-            cls.set_thread_local_locking_context("web")
-        return tld.lock_service_context  # type: ignore
+            cls.set_thread_local_locking_context("web", additional_processing_identifier=additional_processing_identifier)
+        return tld.lock_service_context[additional_processing_identifier]  # type: ignore

     @classmethod
-    def locked_by(cls) -> str:
-        ctx = cls.get_thread_local_locking_context()
-        return f"{ctx['domain']}:{ctx['uuid']}:{ctx['thread_id']}"
+    def locked_by(cls, additional_processing_identifier: str | None = None) -> str:
+        ctx = cls.get_thread_local_locking_context(additional_processing_identifier=additional_processing_identifier)
+        return f"{ctx['domain']}:{ctx['uuid']}:{ctx['thread_id']}:{additional_processing_identifier}"

     @classmethod
-    def lock(cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel) -> None:
-        ctx = cls.get_thread_local_locking_context()
+    def lock(
+        cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel, additional_processing_identifier: str | None = None
+    ) -> None:
+        ctx = cls.get_thread_local_locking_context(additional_processing_identifier=additional_processing_identifier)
         ctx["locks"][process_instance_id] = queue_entry

     @classmethod
-    def lock_many(cls, queue_entries: list[ProcessInstanceQueueModel]) -> list[int]:
-        ctx = cls.get_thread_local_locking_context()
+    def lock_many(
+        cls, queue_entries: list[ProcessInstanceQueueModel], additional_processing_identifier: str | None = None
+    ) -> list[int]:
+        ctx = cls.get_thread_local_locking_context(additional_processing_identifier=additional_processing_identifier)
         new_locks = {entry.process_instance_id: entry for entry in queue_entries}
         new_lock_ids = list(new_locks.keys())
         ctx["locks"].update(new_locks)
         return new_lock_ids

     @classmethod
-    def unlock(cls, process_instance_id: int) -> ProcessInstanceQueueModel:
-        queue_model = cls.try_unlock(process_instance_id)
+    def unlock(cls, process_instance_id: int, additional_processing_identifier: str | None = None) -> ProcessInstanceQueueModel:
+        queue_model = cls.try_unlock(process_instance_id, additional_processing_identifier=additional_processing_identifier)
         if queue_model is None:
             raise ExpectedLockNotFoundError(f"Could not find a lock for process instance: {process_instance_id}")
         return queue_model

     @classmethod
-    def try_unlock(cls, process_instance_id: int) -> ProcessInstanceQueueModel | None:
-        ctx = cls.get_thread_local_locking_context()
+    def try_unlock(
+        cls, process_instance_id: int, additional_processing_identifier: str | None = None
+    ) -> ProcessInstanceQueueModel | None:
+        ctx = cls.get_thread_local_locking_context(additional_processing_identifier=additional_processing_identifier)
         return ctx["locks"].pop(process_instance_id, None)  # type: ignore

     @classmethod
-    def has_lock(cls, process_instance_id: int) -> bool:
-        ctx = cls.get_thread_local_locking_context()
+    def has_lock(cls, process_instance_id: int, additional_processing_identifier: str | None = None) -> bool:
+        ctx = cls.get_thread_local_locking_context(additional_processing_identifier=additional_processing_identifier)
+        current_app.logger.info(f"THREAD LOCK: {ctx}")
         return process_instance_id in ctx["locks"]

     @classmethod

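The lock context is now a dict keyed by additional_processing_identifier (for example a celery task id), so two tasks running in the same worker process and thread no longer share lock bookkeeping, and the identifier becomes part of the lock owner string. A standalone sketch of that idea, simplified from the service above (the domain string and identifiers are illustrative):

import threading
import uuid

PROCESS_UUID = uuid.uuid4()
lock_service_context: dict[str | None, dict] = {}

def set_context(domain: str, additional_processing_identifier: str | None = None) -> None:
    lock_service_context[additional_processing_identifier] = {
        "domain": domain,
        "uuid": PROCESS_UUID,
        "thread_id": threading.get_ident(),
        "locks": {},
    }

def locked_by(additional_processing_identifier: str | None = None) -> str:
    ctx = lock_service_context[additional_processing_identifier]
    return f"{ctx['domain']}:{ctx['uuid']}:{ctx['thread_id']}:{additional_processing_identifier}"

# two celery tasks in the same process and thread still get distinct owners and lock dicts
set_context("celery_worker", additional_processing_identifier="task-aaa")
set_context("celery_worker", additional_processing_identifier="task-bbb")
print(locked_by("task-aaa"))  # celery_worker:<uuid>:<thread id>:task-aaa
print(locked_by("task-bbb"))  # celery_worker:<uuid>:<thread id>:task-bbb
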
@@ -101,6 +101,7 @@ from spiffworkflow_backend.services.workflow_execution_service import ExecutionS
 from spiffworkflow_backend.services.workflow_execution_service import ExecutionStrategyNotConfiguredError
 from spiffworkflow_backend.services.workflow_execution_service import SkipOneExecutionStrategy
 from spiffworkflow_backend.services.workflow_execution_service import TaskModelSavingDelegate
+from spiffworkflow_backend.services.workflow_execution_service import TaskRunnability
 from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionService
 from spiffworkflow_backend.services.workflow_execution_service import execution_strategy_named
 from spiffworkflow_backend.specs.start_event import StartEvent
@@ -422,10 +423,12 @@ class ProcessInstanceProcessor:
         script_engine: PythonScriptEngine | None = None,
         workflow_completed_handler: WorkflowCompletedHandler | None = None,
         process_id_to_run: str | None = None,
+        additional_processing_identifier: str | None = None,
     ) -> None:
         """Create a Workflow Processor based on the serialized information available in the process_instance model."""
         self._script_engine = script_engine or self.__class__._default_script_engine
         self._workflow_completed_handler = workflow_completed_handler
+        self.additional_processing_identifier = additional_processing_identifier
         self.setup_processor_with_process_instance(
             process_instance_model=process_instance_model,
             validate_only=validate_only,

@@ -520,9 +523,7 @@ class ProcessInstanceProcessor:
         return bpmn_process_instance

     @staticmethod
-    def set_script_engine(
-        bpmn_process_instance: BpmnWorkflow, script_engine: PythonScriptEngine | None = None
-    ) -> None:
+    def set_script_engine(bpmn_process_instance: BpmnWorkflow, script_engine: PythonScriptEngine | None = None) -> None:
         script_engine_to_use = script_engine or ProcessInstanceProcessor._default_script_engine
         script_engine_to_use.environment.restore_state(bpmn_process_instance)
         bpmn_process_instance.script_engine = script_engine_to_use
@@ -562,15 +563,11 @@ class ProcessInstanceProcessor:
            bpmn_process_definition.bpmn_identifier,
            bpmn_process_definition=bpmn_process_definition,
        )
-        task_definitions = TaskDefinitionModel.query.filter_by(
-            bpmn_process_definition_id=bpmn_process_definition.id
-        ).all()
+        task_definitions = TaskDefinitionModel.query.filter_by(bpmn_process_definition_id=bpmn_process_definition.id).all()
        bpmn_process_definition_dict: dict = bpmn_process_definition.properties_json
        bpmn_process_definition_dict["task_specs"] = {}
        for task_definition in task_definitions:
-            bpmn_process_definition_dict["task_specs"][
-                task_definition.bpmn_identifier
-            ] = task_definition.properties_json
+            bpmn_process_definition_dict["task_specs"][task_definition.bpmn_identifier] = task_definition.properties_json
            cls._update_bpmn_definition_mappings(
                bpmn_definition_to_task_definitions_mappings,
                bpmn_process_definition.bpmn_identifier,
@@ -589,8 +586,7 @@ class ProcessInstanceProcessor:
        bpmn_process_subprocess_definitions = (
            BpmnProcessDefinitionModel.query.join(
                BpmnProcessDefinitionRelationshipModel,
-                BpmnProcessDefinitionModel.id
-                == BpmnProcessDefinitionRelationshipModel.bpmn_process_definition_child_id,
+                BpmnProcessDefinitionModel.id == BpmnProcessDefinitionRelationshipModel.bpmn_process_definition_child_id,
            )
            .filter_by(bpmn_process_definition_parent_id=bpmn_process_definition.id)
            .all()
@@ -604,18 +600,14 @@ class ProcessInstanceProcessor:
                bpmn_process_definition=bpmn_subprocess_definition,
            )
            bpmn_process_definition_dict: dict = bpmn_subprocess_definition.properties_json
-            spiff_bpmn_process_dict["subprocess_specs"][
-                bpmn_subprocess_definition.bpmn_identifier
-            ] = bpmn_process_definition_dict
+            spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition.bpmn_identifier] = bpmn_process_definition_dict
            spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition.bpmn_identifier]["task_specs"] = {}
            bpmn_subprocess_definition_bpmn_identifiers[bpmn_subprocess_definition.id] = (
                bpmn_subprocess_definition.bpmn_identifier
            )

        task_definitions = TaskDefinitionModel.query.filter(
-            TaskDefinitionModel.bpmn_process_definition_id.in_(  # type: ignore
-                bpmn_subprocess_definition_bpmn_identifiers.keys()
-            )
+            TaskDefinitionModel.bpmn_process_definition_id.in_(bpmn_subprocess_definition_bpmn_identifiers.keys())  # type: ignore
        ).all()
        for task_definition in task_definitions:
            bpmn_subprocess_definition_bpmn_identifier = bpmn_subprocess_definition_bpmn_identifiers[
@@ -729,10 +721,7 @@ class ProcessInstanceProcessor:
            spiff_bpmn_process_dict["spec"] = element_unit_process_dict["spec"]
            keys = list(spiff_bpmn_process_dict["subprocess_specs"].keys())
            for k in keys:
-                if (
-                    k not in subprocess_specs_for_ready_tasks
-                    and k not in element_unit_process_dict["subprocess_specs"]
-                ):
+                if k not in subprocess_specs_for_ready_tasks and k not in element_unit_process_dict["subprocess_specs"]:
                    spiff_bpmn_process_dict["subprocess_specs"].pop(k)

        bpmn_process = process_instance_model.bpmn_process

@@ -810,9 +799,7 @@ class ProcessInstanceProcessor:
            finally:
                spiff_logger.setLevel(original_spiff_logger_log_level)
        else:
-            bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(
-                spec, subprocesses
-            )
+            bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(spec, subprocesses)
        bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only

        return (
@@ -862,9 +849,7 @@ class ProcessInstanceProcessor:
                )
            else:
                if group_model is None:
-                    raise (
-                        NoPotentialOwnersForTaskError(f"Could not find a group with name matching lane: {task_lane}")
-                    )
+                    raise (NoPotentialOwnersForTaskError(f"Could not find a group with name matching lane: {task_lane}"))
                potential_owner_ids = [i.user_id for i in group_model.user_group_assignments]
            self.raise_if_no_potential_owners(
                potential_owner_ids,
@@ -924,16 +909,12 @@ class ProcessInstanceProcessor:
        single_process_hash = sha256(json.dumps(process_bpmn_properties, sort_keys=True).encode("utf8")).hexdigest()
        full_process_model_hash = None
        if full_bpmn_spec_dict is not None:
-            full_process_model_hash = sha256(
-                json.dumps(full_bpmn_spec_dict, sort_keys=True).encode("utf8")
-            ).hexdigest()
+            full_process_model_hash = sha256(json.dumps(full_bpmn_spec_dict, sort_keys=True).encode("utf8")).hexdigest()
            bpmn_process_definition = BpmnProcessDefinitionModel.query.filter_by(
                full_process_model_hash=full_process_model_hash
            ).first()
        else:
-            bpmn_process_definition = BpmnProcessDefinitionModel.query.filter_by(
-                single_process_hash=single_process_hash
-            ).first()
+            bpmn_process_definition = BpmnProcessDefinitionModel.query.filter_by(single_process_hash=single_process_hash).first()

        if bpmn_process_definition is None:
            task_specs = process_bpmn_properties.pop("task_specs")
@@ -974,9 +955,7 @@ class ProcessInstanceProcessor:
                process_bpmn_identifier,
                bpmn_process_definition=bpmn_process_definition,
            )
-            task_definitions = TaskDefinitionModel.query.filter_by(
-                bpmn_process_definition_id=bpmn_process_definition.id
-            ).all()
+            task_definitions = TaskDefinitionModel.query.filter_by(bpmn_process_definition_id=bpmn_process_definition.id).all()
            for task_definition in task_definitions:
                self._update_bpmn_definition_mappings(
                    self.bpmn_definition_to_task_definitions_mappings,
@@ -1067,15 +1046,11 @@ class ProcessInstanceProcessor:
        db.session.add(self.process_instance_model)
        db.session.commit()

-        human_tasks = HumanTaskModel.query.filter_by(
-            process_instance_id=self.process_instance_model.id, completed=False
-        ).all()
+        human_tasks = HumanTaskModel.query.filter_by(process_instance_id=self.process_instance_model.id, completed=False).all()
        ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks()

        process_model_display_name = ""
-        process_model_info = ProcessModelService.get_process_model(
-            self.process_instance_model.process_model_identifier
-        )
+        process_model_info = ProcessModelService.get_process_model(self.process_instance_model.process_model_identifier)
        if process_model_info is not None:
            process_model_display_name = process_model_info.display_name

@@ -1177,15 +1152,13 @@ class ProcessInstanceProcessor:
        if spiff_task.task_spec.manual:
            # Executing or not executing a human task results in the same state.
            current_app.logger.info(
-                f"Manually skipping Human Task {spiff_task.task_spec.name} of process"
-                f" instance {self.process_instance_model.id}"
+                f"Manually skipping Human Task {spiff_task.task_spec.name} of process instance {self.process_instance_model.id}"
            )
            human_task = HumanTaskModel.query.filter_by(task_id=task_id).first()
            self.complete_task(spiff_task, human_task=human_task, user=user)
        elif execute:
            current_app.logger.info(
-                f"Manually executing Task {spiff_task.task_spec.name} of process"
-                f" instance {self.process_instance_model.id}"
+                f"Manually executing Task {spiff_task.task_spec.name} of process instance {self.process_instance_model.id}"
            )
            self.do_engine_steps(save=True, execution_strategy_name="run_current_ready_tasks")
        else:
@@ -1207,9 +1180,7 @@ class ProcessInstanceProcessor:
            bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
        )
        task_service.update_all_tasks_from_spiff_tasks(spiff_tasks, [], start_time)
-        ProcessInstanceTmpService.add_event_to_process_instance(
-            self.process_instance_model, event_type, task_guid=task_id
-        )
+        ProcessInstanceTmpService.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id)

        self.save()
        # Saving the workflow seems to reset the status
@@ -1265,18 +1236,12 @@ class ProcessInstanceProcessor:
        bpmn_process_identifier: str,
    ) -> str:
        if bpmn_process_identifier is None:
-            raise ValueError(
-                "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
-            )
+            raise ValueError("bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None")

-        spec_reference = (
-            ReferenceCacheModel.basic_query().filter_by(identifier=bpmn_process_identifier, type="process").first()
-        )
+        spec_reference = ReferenceCacheModel.basic_query().filter_by(identifier=bpmn_process_identifier, type="process").first()
        bpmn_file_full_path = None
        if spec_reference is None:
-            bpmn_file_full_path = ProcessInstanceProcessor.backfill_missing_spec_reference_records(
-                bpmn_process_identifier
-            )
+            bpmn_file_full_path = ProcessInstanceProcessor.backfill_missing_spec_reference_records(bpmn_process_identifier)
        else:
            bpmn_file_full_path = os.path.join(
                FileSystemService.root_path(),
@@ -1286,10 +1251,7 @@ class ProcessInstanceProcessor:
                raise (
                    ApiError(
                        error_code="could_not_find_bpmn_process_identifier",
-                        message=(
-                            "Could not find the the given bpmn process identifier from any sources:"
-                            f" {bpmn_process_identifier}"
-                        ),
+                        message=f"Could not find the the given bpmn process identifier from any sources: {bpmn_process_identifier}",
                    )
                )
        return os.path.abspath(bpmn_file_full_path)
@@ -1325,9 +1287,7 @@ class ProcessInstanceProcessor:

        if new_bpmn_files:
            parser.add_bpmn_files(new_bpmn_files)
-            ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(
-                parser, processed_identifiers
-            )
+            ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(parser, processed_identifiers)

    @staticmethod
    def get_spec(

@@ -1384,6 +1344,7 @@ class ProcessInstanceProcessor:
        if bpmn_process_instance.is_completed():
            return ProcessInstanceStatus.complete
        user_tasks = bpmn_process_instance.get_tasks(state=TaskState.READY, manual=True)
+        ready_tasks = bpmn_process_instance.get_tasks(state=TaskState.READY)

        # workflow.waiting_events (includes timers, and timers have a when firing property)

@@ -1396,6 +1357,8 @@ class ProcessInstanceProcessor:
        # return ProcessInstanceStatus.waiting
        if len(user_tasks) > 0:
            return ProcessInstanceStatus.user_input_required
+        elif len(ready_tasks) > 0:
+            return ProcessInstanceStatus.running
        else:
            return ProcessInstanceStatus.waiting

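With the extra ready_tasks check the resolution order becomes complete, then user_input_required, then running, then waiting; the new running value is what lets callers treat an instance that still has engine work as in progress rather than stalled. A standalone sketch of that precedence (the enum values spelled out below are for illustration; only the names used in the hunk above come from this diff):

from enum import Enum

class ProcessInstanceStatus(Enum):  # subset, for illustration only
    complete = "complete"
    user_input_required = "user_input_required"
    running = "running"
    waiting = "waiting"

def resolve_status(is_completed: bool, user_task_count: int, ready_task_count: int) -> ProcessInstanceStatus:
    if is_completed:
        return ProcessInstanceStatus.complete
    if user_task_count > 0:
        return ProcessInstanceStatus.user_input_required
    if ready_task_count > 0:
        return ProcessInstanceStatus.running  # engine still has work; a poller can keep refreshing
    return ProcessInstanceStatus.waiting

print(resolve_status(False, 0, 3))  # ProcessInstanceStatus.running
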
@@ -1455,15 +1418,17 @@ class ProcessInstanceProcessor:
        save: bool = False,
        execution_strategy_name: str | None = None,
        execution_strategy: ExecutionStrategy | None = None,
-    ) -> None:
+    ) -> TaskRunnability:
        if self.process_instance_model.persistence_level != "none":
-            with ProcessInstanceQueueService.dequeued(self.process_instance_model):
+            with ProcessInstanceQueueService.dequeued(
+                self.process_instance_model, additional_processing_identifier=self.additional_processing_identifier
+            ):
                # TODO: ideally we just lock in the execution service, but not sure
                # about _add_bpmn_process_definitions and if that needs to happen in
                # the same lock like it does on main
-                self._do_engine_steps(exit_at, save, execution_strategy_name, execution_strategy)
+                return self._do_engine_steps(exit_at, save, execution_strategy_name, execution_strategy)
        else:
-            self._do_engine_steps(
+            return self._do_engine_steps(
                exit_at,
                save=False,
                execution_strategy_name=execution_strategy_name,
@@ -1476,7 +1441,7 @@ class ProcessInstanceProcessor:
        save: bool = False,
        execution_strategy_name: str | None = None,
        execution_strategy: ExecutionStrategy | None = None,
-    ) -> None:
+    ) -> TaskRunnability:
        self._add_bpmn_process_definitions()

        task_model_delegate = TaskModelSavingDelegate(
@@ -1502,9 +1467,11 @@ class ProcessInstanceProcessor:
            execution_strategy,
            self._script_engine.environment.finalize_result,
            self.save,
+            additional_processing_identifier=self.additional_processing_identifier,
        )
-        execution_service.run_and_save(exit_at, save)
+        task_runnability = execution_service.run_and_save(exit_at, save)
        self.check_all_tasks()
+        return task_runnability

    @classmethod
    def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> list[SpiffTask]:

@ -1760,9 +1727,7 @@ class ProcessInstanceProcessor:
|
||||||
return self.bpmn_process_instance.get_task_from_id(UUID(task_guid))
|
return self.bpmn_process_instance.get_task_from_id(UUID(task_guid))
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_task_by_bpmn_identifier(
|
def get_task_by_bpmn_identifier(cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow) -> SpiffTask | None:
|
||||||
cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow
|
|
||||||
) -> SpiffTask | None:
|
|
||||||
all_tasks = bpmn_process_instance.get_tasks(state=TaskState.ANY_MASK)
|
all_tasks = bpmn_process_instance.get_tasks(state=TaskState.ANY_MASK)
|
||||||
for task in all_tasks:
|
for task in all_tasks:
|
||||||
if task.task_spec.name == bpmn_task_identifier:
|
if task.task_spec.name == bpmn_task_identifier:
|
||||||
|
|
|
@@ -35,22 +35,22 @@ class ProcessInstanceQueueService:

 @classmethod
 def enqueue_new_process_instance(cls, process_instance: ProcessInstanceModel, run_at_in_seconds: int) -> None:
-queue_entry = ProcessInstanceQueueModel(
-process_instance_id=process_instance.id, run_at_in_seconds=run_at_in_seconds
-)
+queue_entry = ProcessInstanceQueueModel(process_instance_id=process_instance.id, run_at_in_seconds=run_at_in_seconds)
 cls._configure_and_save_queue_entry(process_instance, queue_entry)

 @classmethod
-def _enqueue(cls, process_instance: ProcessInstanceModel) -> None:
-queue_entry = ProcessInstanceLockService.unlock(process_instance.id)
+def _enqueue(cls, process_instance: ProcessInstanceModel, additional_processing_identifier: str | None = None) -> None:
+queue_entry = ProcessInstanceLockService.unlock(
+process_instance.id, additional_processing_identifier=additional_processing_identifier
+)
 current_time = round(time.time())
 if current_time > queue_entry.run_at_in_seconds:
 queue_entry.run_at_in_seconds = current_time
 cls._configure_and_save_queue_entry(process_instance, queue_entry)

 @classmethod
-def _dequeue(cls, process_instance: ProcessInstanceModel) -> None:
-locked_by = ProcessInstanceLockService.locked_by()
+def _dequeue(cls, process_instance: ProcessInstanceModel, additional_processing_identifier: str | None = None) -> None:
+locked_by = ProcessInstanceLockService.locked_by(additional_processing_identifier=additional_processing_identifier)
 current_time = round(time.time())

 db.session.query(ProcessInstanceQueueModel).filter(

@@ -84,16 +84,22 @@ class ProcessInstanceQueueService:
 f"It has already been locked by {queue_entry.locked_by}."
 )

-ProcessInstanceLockService.lock(process_instance.id, queue_entry)
+ProcessInstanceLockService.lock(
+process_instance.id, queue_entry, additional_processing_identifier=additional_processing_identifier
+)

 @classmethod
 @contextlib.contextmanager
-def dequeued(cls, process_instance: ProcessInstanceModel) -> Generator[None, None, None]:
-reentering_lock = ProcessInstanceLockService.has_lock(process_instance.id)
+def dequeued(
+cls, process_instance: ProcessInstanceModel, additional_processing_identifier: str | None = None
+) -> Generator[None, None, None]:
+reentering_lock = ProcessInstanceLockService.has_lock(
+process_instance.id, additional_processing_identifier=additional_processing_identifier
+)
 if not reentering_lock:
 # this can blow up with ProcessInstanceIsNotEnqueuedError or ProcessInstanceIsAlreadyLockedError
 # that's fine, let it bubble up. and in that case, there's no need to _enqueue / unlock
-cls._dequeue(process_instance)
+cls._dequeue(process_instance, additional_processing_identifier=additional_processing_identifier)
 try:
 yield
 except Exception as ex:

@@ -107,7 +113,7 @@ class ProcessInstanceQueueService:
 raise ex
 finally:
 if not reentering_lock:
-cls._enqueue(process_instance)
+cls._enqueue(process_instance, additional_processing_identifier=additional_processing_identifier)

 @classmethod
 def entries_with_status(
@@ -173,9 +173,7 @@ class ProcessInstanceReportService:
 ),
 "system_report_completed_instances": system_report_completed_instances,
 "system_report_in_progress_instances_initiated_by_me": system_report_in_progress_instances_initiated_by_me,
-"system_report_in_progress_instances_with_tasks_for_me": (
-system_report_in_progress_instances_with_tasks_for_me
-),
+"system_report_in_progress_instances_with_tasks_for_me": system_report_in_progress_instances_with_tasks_for_me,
 "system_report_in_progress_instances_with_tasks": system_report_in_progress_instances_with_tasks,
 }
 if metadata_key not in temp_system_metadata_map:

@@ -184,9 +182,7 @@ class ProcessInstanceReportService:
 return return_value

 @classmethod
-def process_instance_metadata_as_columns(
-cls, process_model_identifier: str | None = None
-) -> list[ReportMetadataColumn]:
+def process_instance_metadata_as_columns(cls, process_model_identifier: str | None = None) -> list[ReportMetadataColumn]:
 columns_for_metadata_query = (
 db.session.query(ProcessInstanceMetadataModel.key)
 .order_by(ProcessInstanceMetadataModel.key)

@@ -226,9 +222,7 @@ class ProcessInstanceReportService:
 report_identifier: str | None = None,
 ) -> ProcessInstanceReportModel:
 if report_id is not None:
-process_instance_report = ProcessInstanceReportModel.query.filter_by(
-id=report_id, created_by_id=user.id
-).first()
+process_instance_report = ProcessInstanceReportModel.query.filter_by(id=report_id, created_by_id=user.id).first()
 if process_instance_report is not None:
 return process_instance_report  # type: ignore


@@ -269,14 +263,10 @@ class ProcessInstanceReportService:
 process_instance_dict = process_instance_row[0].serialized()
 for metadata_column in metadata_columns:
 if metadata_column["accessor"] not in process_instance_dict:
-process_instance_dict[metadata_column["accessor"]] = process_instance_mapping[
-metadata_column["accessor"]
-]
+process_instance_dict[metadata_column["accessor"]] = process_instance_mapping[metadata_column["accessor"]]

 if "last_milestone_bpmn_name" in process_instance_mapping:
-process_instance_dict["last_milestone_bpmn_name"] = process_instance_mapping[
-"last_milestone_bpmn_name"
-]
+process_instance_dict["last_milestone_bpmn_name"] = process_instance_mapping["last_milestone_bpmn_name"]

 results.append(process_instance_dict)
 return results

@@ -305,9 +295,7 @@ class ProcessInstanceReportService:
 .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
 )
 if restrict_human_tasks_to_user is not None:
-human_task_query = human_task_query.filter(
-HumanTaskUserModel.user_id == restrict_human_tasks_to_user.id
-)
+human_task_query = human_task_query.filter(HumanTaskUserModel.user_id == restrict_human_tasks_to_user.id)
 potential_owner_usernames_from_group_concat_or_similar = cls._get_potential_owner_usernames(assigned_user)
 human_task = (
 human_task_query.add_columns(

@@ -327,9 +315,9 @@ class ProcessInstanceReportService:

 @classmethod
 def _get_potential_owner_usernames(cls, assigned_user: AliasedClass) -> Any:
-potential_owner_usernames_from_group_concat_or_similar = func.group_concat(
-assigned_user.username.distinct()
-).label("potential_owner_usernames")
+potential_owner_usernames_from_group_concat_or_similar = func.group_concat(assigned_user.username.distinct()).label(
+"potential_owner_usernames"
+)
 db_type = current_app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE")

 if db_type == "postgres":

@@ -421,13 +409,9 @@ class ProcessInstanceReportService:
 if human_task_already_joined is False:
 process_instance_query = process_instance_query.join(HumanTaskModel)  # type: ignore
 if process_status is not None:
-non_active_statuses = [
-s for s in process_status.split(",") if s not in ProcessInstanceModel.active_statuses()
-]
+non_active_statuses = [s for s in process_status.split(",") if s not in ProcessInstanceModel.active_statuses()]
 if len(non_active_statuses) == 0:
-process_instance_query = process_instance_query.filter(
-HumanTaskModel.completed.is_(False)  # type: ignore
-)
+process_instance_query = process_instance_query.filter(HumanTaskModel.completed.is_(False))  # type: ignore
 # Check to make sure the task is not only available for the group but the user as well
 if instances_with_tasks_waiting_for_me is not True:
 human_task_user_alias = aliased(HumanTaskUserModel)

@@ -519,9 +503,7 @@ class ProcessInstanceReportService:
 and with_relation_to_me is True
 ):
 if user is None:
-raise ProcessInstanceReportCannotBeRunError(
-"A user must be specified to run report with with_relation_to_me"
-)
+raise ProcessInstanceReportCannotBeRunError("A user must be specified to run report with with_relation_to_me")
 process_instance_query = process_instance_query.outerjoin(HumanTaskModel).outerjoin(
 HumanTaskUserModel,
 and_(

@@ -550,9 +532,7 @@ class ProcessInstanceReportService:
 raise ProcessInstanceReportCannotBeRunError(
 "A user must be specified to run report with instances_with_tasks_completed_by_me."
 )
-process_instance_query = process_instance_query.filter(
-ProcessInstanceModel.process_initiator_id != user.id
-)
+process_instance_query = process_instance_query.filter(ProcessInstanceModel.process_initiator_id != user.id)
 process_instance_query = process_instance_query.join(
 HumanTaskModel,
 and_(

@@ -569,9 +549,7 @@ class ProcessInstanceReportService:
 raise ProcessInstanceReportCannotBeRunError(
 "A user must be specified to run report with instances_with_tasks_waiting_for_me."
 )
-process_instance_query = process_instance_query.filter(
-ProcessInstanceModel.process_initiator_id != user.id
-)
+process_instance_query = process_instance_query.filter(ProcessInstanceModel.process_initiator_id != user.id)
 process_instance_query = process_instance_query.join(
 HumanTaskModel,
 and_(

@@ -605,9 +583,7 @@ class ProcessInstanceReportService:

 if user_group_identifier is not None:
 if user is None:
-raise ProcessInstanceReportCannotBeRunError(
-"A user must be specified to run report with a group identifier."
-)
+raise ProcessInstanceReportCannotBeRunError("A user must be specified to run report with a group identifier.")
 process_instance_query = cls.filter_by_user_group_identifier(
 process_instance_query=process_instance_query,
 user_group_identifier=user_group_identifier,

@@ -647,9 +623,7 @@ class ProcessInstanceReportService:
 elif filter_for_column["operator"] == "less_than":
 join_conditions.append(instance_metadata_alias.value < filter_for_column["field_value"])
 elif filter_for_column["operator"] == "contains":
-join_conditions.append(
-instance_metadata_alias.value.like(f"%{filter_for_column['field_value']}%")
-)
+join_conditions.append(instance_metadata_alias.value.like(f"%{filter_for_column['field_value']}%"))
 elif filter_for_column["operator"] == "is_empty":
 # we still need to return results if the metadata value is null so make sure it's outer join
 isouter = True
@@ -17,12 +17,12 @@ from SpiffWorkflow.bpmn.specs.event_definitions.timer import TimerEventDefinitio
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.util.task import TaskState  # type: ignore

-from spiffworkflow_backend import db
 from spiffworkflow_backend.data_migrations.process_instance_migrator import ProcessInstanceMigrator
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.error import HumanTaskAlreadyCompletedError
 from spiffworkflow_backend.exceptions.error import HumanTaskNotFoundError
 from spiffworkflow_backend.exceptions.error import UserDoesNotHaveAccessToTaskError
+from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.group import GroupModel
 from spiffworkflow_backend.models.human_task import HumanTaskModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceApi

@@ -42,6 +42,7 @@ from spiffworkflow_backend.services.process_instance_processor import ProcessIns
 from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
 from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.workflow_execution_service import TaskRunnability
 from spiffworkflow_backend.services.workflow_service import WorkflowService
 from spiffworkflow_backend.specs.start_event import StartConfiguration


@@ -125,16 +126,12 @@ class ProcessInstanceService:
 user: UserModel,
 ) -> ProcessInstanceModel:
 process_model = ProcessModelService.get_process_model(process_model_identifier)
-process_instance_model, (cycle_count, _, duration_in_seconds) = cls.create_process_instance(
-process_model, user
-)
+process_instance_model, (cycle_count, _, duration_in_seconds) = cls.create_process_instance(process_model, user)
 cls.register_process_model_cycles(process_model_identifier, cycle_count, duration_in_seconds)
 return process_instance_model

 @classmethod
-def register_process_model_cycles(
-cls, process_model_identifier: str, cycle_count: int, duration_in_seconds: int
-) -> None:
+def register_process_model_cycles(cls, process_model_identifier: str, cycle_count: int, duration_in_seconds: int) -> None:
 # clean up old cycle record if it exists. event if the given cycle_count is 0 the previous version
 # of the model could have included a cycle timer start event
 cycles = ProcessModelCycleModel.query.filter(

@@ -230,6 +227,7 @@ class ProcessInstanceService:

 return False

+# this is only used from background processor
 @classmethod
 def do_waiting(cls, status_value: str) -> None:
 run_at_in_seconds_threshold = round(time.time())

@@ -268,12 +266,18 @@ class ProcessInstanceService:
 process_instance: ProcessInstanceModel,
 status_value: str | None = None,
 execution_strategy_name: str | None = None,
-) -> ProcessInstanceProcessor | None:
+additional_processing_identifier: str | None = None,
+) -> tuple[ProcessInstanceProcessor | None, TaskRunnability]:
 processor = None
-with ProcessInstanceQueueService.dequeued(process_instance):
+task_runnability = TaskRunnability.unknown_if_ready_tasks
+with ProcessInstanceQueueService.dequeued(
+process_instance, additional_processing_identifier=additional_processing_identifier
+):
 ProcessInstanceMigrator.run(process_instance)
 processor = ProcessInstanceProcessor(
-process_instance, workflow_completed_handler=cls.schedule_next_process_model_cycle
+process_instance,
+workflow_completed_handler=cls.schedule_next_process_model_cycle,
+additional_processing_identifier=additional_processing_identifier,
 )

 # if status_value is user_input_required (we are processing instances with that status from background processor),

@@ -281,13 +285,16 @@ class ProcessInstanceService:
 # otherwise, in all cases, we should optimistically skip it.
 if status_value and cls.can_optimistically_skip(processor, status_value):
 current_app.logger.info(f"Optimistically skipped process_instance {process_instance.id}")
-return None
+return (processor, task_runnability)

 db.session.refresh(process_instance)
 if status_value is None or process_instance.status == status_value:
-processor.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name)
+task_runnability = processor.do_engine_steps(
+save=True,
+execution_strategy_name=execution_strategy_name,
+)

-return processor
+return (processor, task_runnability)

 @staticmethod
 def processor_to_process_instance_api(process_instance: ProcessInstanceModel) -> ProcessInstanceApi:

@@ -334,10 +341,7 @@ class ProcessInstanceService:
 else:
 raise ApiError.from_task(
 error_code="task_lane_user_error",
-message=(
-"Spiff Task %s lane user dict must have a key called"
-" 'value' with the user's uid in it."
-)
+message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it."
 % spiff_task.task_spec.name,
 task=spiff_task,
 )

@@ -425,9 +429,7 @@ class ProcessInstanceService:
 models: list[ProcessInstanceFileDataModel],
 ) -> None:
 for model in models:
-digest_reference = (
-f"data:{model.mimetype};name={model.filename};base64,{cls.FILE_DATA_DIGEST_PREFIX}{model.digest}"
-)
+digest_reference = f"data:{model.mimetype};name={model.filename};base64,{cls.FILE_DATA_DIGEST_PREFIX}{model.digest}"
 if model.list_index is None:
 data[model.identifier] = digest_reference
 else:
@@ -113,9 +113,7 @@ class ProcessModelService(FileSystemService):

 @classmethod
 def save_process_model(cls, process_model: ProcessModelInfo) -> None:
-process_model_path = os.path.abspath(
-os.path.join(FileSystemService.root_path(), process_model.id_for_file_path())
-)
+process_model_path = os.path.abspath(os.path.join(FileSystemService.root_path(), process_model.id_for_file_path()))
 os.makedirs(process_model_path, exist_ok=True)
 json_path = os.path.abspath(os.path.join(process_model_path, cls.PROCESS_MODEL_JSON_FILE))
 json_data = cls.PROCESS_MODEL_SCHEMA.dump(process_model)

@@ -126,9 +124,7 @@ class ProcessModelService(FileSystemService):

 @classmethod
 def process_model_delete(cls, process_model_id: str) -> None:
-instances = ProcessInstanceModel.query.filter(
-ProcessInstanceModel.process_model_identifier == process_model_id
-).all()
+instances = ProcessInstanceModel.query.filter(ProcessInstanceModel.process_model_identifier == process_model_id).all()
 if len(instances) > 0:
 raise ProcessModelWithInstancesNotDeletableError(
 f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it."

@@ -218,8 +214,7 @@ class ProcessModelService(FileSystemService):
 ) -> list[ProcessModelInfo]:
 if filter_runnable_as_extension and filter_runnable_by_user:
 raise Exception(
-"It is not valid to filter process models by both filter_runnable_by_user and"
-" filter_runnable_as_extension"
+"It is not valid to filter process models by both filter_runnable_by_user and filter_runnable_as_extension"
 )

 # get the full list (before we filter it by the ones you are allowed to start)

@@ -277,13 +272,9 @@ class ProcessModelService(FileSystemService):

 permitted_process_model_identifiers = []
 for process_model_identifier in process_model_identifiers:
-modified_process_model_id = ProcessModelInfo.modify_process_identifier_for_path_param(
-process_model_identifier
-)
+modified_process_model_id = ProcessModelInfo.modify_process_identifier_for_path_param(process_model_identifier)
 uri = f"{permission_base_uri}/{modified_process_model_id}"
-has_permission = AuthorizationService.user_has_permission(
-user=user, permission=permission_to_check, target_uri=uri
-)
+has_permission = AuthorizationService.user_has_permission(user=user, permission=permission_to_check, target_uri=uri)
 if has_permission:
 permitted_process_model_identifiers.append(process_model_identifier)


@@ -351,9 +342,7 @@ class ProcessModelService(FileSystemService):
 for process_group in process_groups:
 modified_process_group_id = ProcessModelInfo.modify_process_identifier_for_path_param(process_group.id)
 uri = f"{permission_base_uri}/{modified_process_group_id}"
-has_permission = AuthorizationService.user_has_permission(
-user=user, permission=permission_to_check, target_uri=uri
-)
+has_permission = AuthorizationService.user_has_permission(user=user, permission=permission_to_check, target_uri=uri)
 if has_permission:
 new_process_group_list.append(process_group)
 return new_process_group_list

@@ -490,9 +479,7 @@ class ProcessModelService(FileSystemService):
 if cls.is_process_group(nested_item.path):
 # This is a nested group
 process_group.process_groups.append(
-cls.find_or_create_process_group(
-nested_item.path, find_all_nested_items=find_all_nested_items
-)
+cls.find_or_create_process_group(nested_item.path, find_all_nested_items=find_all_nested_items)
 )
 elif ProcessModelService.is_process_model(nested_item.path):
 process_group.process_models.append(
@@ -104,9 +104,7 @@ class ProcessModelTestRunnerMostlyPureSpiffDelegate(ProcessModelTestRunnerDelega
 if sub_parser.process_executable:
 executable_process = sub_parser.bpmn_id
 if executable_process is None:
-raise BpmnFileMissingExecutableProcessError(
-f"Executable process cannot be found in {bpmn_file}. Test cannot run."
-)
+raise BpmnFileMissingExecutableProcessError(f"Executable process cannot be found in {bpmn_file}. Test cannot run.")

 all_related = self._find_related_bpmn_files(bpmn_file)
 for related_file in all_related:

@@ -254,9 +252,7 @@ class ProcessModelTestRunner:
 f" class '{process_model_test_runner_delegate_class}' does not"
 )

-self.process_model_test_runner_delegate = process_model_test_runner_delegate_class(
-process_model_directory_path
-)
+self.process_model_test_runner_delegate = process_model_test_runner_delegate_class(process_model_directory_path)

 self.test_mappings = self._discover_process_model_test_cases()
 self.test_case_results: list[TestCaseResult] = []

@@ -388,9 +384,7 @@ class ProcessModelTestRunner:
 def _get_relative_path_of_bpmn_file(self, bpmn_file: str) -> str:
 return os.path.relpath(bpmn_file, start=self.process_model_directory_path)

-def _exception_to_test_case_error_details(
-self, exception: Exception | WorkflowTaskException
-) -> TestCaseErrorDetails:
+def _exception_to_test_case_error_details(self, exception: Exception | WorkflowTaskException) -> TestCaseErrorDetails:
 error_messages = str(exception).split("\n")
 test_case_error_details = TestCaseErrorDetails(error_messages=error_messages)
 if isinstance(exception, WorkflowTaskException):
@@ -12,9 +12,7 @@ class ReferenceCacheService:
 def add_unique_reference_cache_object(
 cls, reference_objects: dict[str, ReferenceCacheModel], reference_cache: ReferenceCacheModel
 ) -> None:
-reference_cache_unique = (
-f"{reference_cache.identifier}{reference_cache.relative_location}{reference_cache.type}"
-)
+reference_cache_unique = f"{reference_cache.identifier}{reference_cache.relative_location}{reference_cache.type}"
 reference_objects[reference_cache_unique] = reference_cache

 @classmethod
@@ -112,10 +112,7 @@ class ServiceTaskDelegate:
 if code == 500:
 msg = "500 (Internal Server Error) - The service you called is experiencing technical difficulties."
 if code == 501:
-msg = (
-"501 (Not Implemented) - This service needs to be called with the"
-" different method (like POST not GET)."
-)
+msg = "501 (Not Implemented) - This service needs to be called with the different method (like POST not GET)."
 return msg

 @classmethod

@@ -149,11 +146,7 @@ class ServiceTaskDelegate:
 ) -> None:
 # v2 support
 base_error = None
-if (
-"error" in parsed_response
-and isinstance(parsed_response["error"], dict)
-and "error_code" in parsed_response["error"]
-):
+if "error" in parsed_response and isinstance(parsed_response["error"], dict) and "error_code" in parsed_response["error"]:
 base_error = parsed_response["error"]
 # v1 support or something terrible happened with a v2 connector
 elif status_code >= 300:
@@ -146,9 +146,7 @@ class SpecFileService(FileSystemService):
 try:
 parser.add_bpmn_xml(cls.get_etree_from_xml_bytes(binary_data), filename=file_name)
 except Exception as exception:
-raise ProcessModelFileInvalidError(
-f"Received error trying to parse bpmn xml: {str(exception)}"
-) from exception
+raise ProcessModelFileInvalidError(f"Received error trying to parse bpmn xml: {str(exception)}") from exception

 @classmethod
 def update_file(

@@ -195,17 +193,13 @@ class SpecFileService(FileSystemService):
 )
 if len(called_element_refs) > 0:
 process_model_identifiers: list[str] = [r.relative_location for r in called_element_refs]
-permitted_process_model_identifiers = (
-ProcessModelService.process_model_identifiers_with_permission_for_user(
+permitted_process_model_identifiers = ProcessModelService.process_model_identifiers_with_permission_for_user(
 user=user,
 permission_to_check="create",
 permission_base_uri="/v1.0/process-instances",
 process_model_identifiers=process_model_identifiers,
 )
-)
-unpermitted_process_model_identifiers = set(process_model_identifiers) - set(
-permitted_process_model_identifiers
-)
+unpermitted_process_model_identifiers = set(process_model_identifiers) - set(permitted_process_model_identifiers)
 if len(unpermitted_process_model_identifiers):
 raise NotAuthorizedError(
 "You are not authorized to use one or more processes as a called element:"
@@ -66,9 +66,7 @@ class TaskModelError(Exception):
 self.line_number = exception.lineno
 self.offset = exception.offset
 elif isinstance(exception, NameError):
-self.add_note(
-WorkflowException.did_you_mean_from_name_error(exception, list(task_model.get_data().keys()))
-)
+self.add_note(WorkflowException.did_you_mean_from_name_error(exception, list(task_model.get_data().keys())))

 # If encountered in a sub-workflow, this traces back up the stack,
 # so we can tell how we got to this particular task, no matter how

@@ -163,9 +161,7 @@ class TaskService:
 """
 (parent_subprocess_guid, _parent_subprocess) = self.__class__._task_subprocess(spiff_task)
 if parent_subprocess_guid is not None:
-spiff_task_of_parent_subprocess = spiff_task.workflow.top_workflow.get_task_from_id(
-UUID(parent_subprocess_guid)
-)
+spiff_task_of_parent_subprocess = spiff_task.workflow.top_workflow.get_task_from_id(UUID(parent_subprocess_guid))

 if spiff_task_of_parent_subprocess is not None:
 self.update_task_model_with_spiff_task(

@@ -196,15 +192,11 @@ class TaskService:

 # we are not sure why task_model.bpmn_process can be None while task_model.bpmn_process_id actually has a valid value
 bpmn_process = (
-new_bpmn_process
-or task_model.bpmn_process
-or BpmnProcessModel.query.filter_by(id=task_model.bpmn_process_id).first()
+new_bpmn_process or task_model.bpmn_process or BpmnProcessModel.query.filter_by(id=task_model.bpmn_process_id).first()
 )

 self.update_task_model(task_model, spiff_task)
-bpmn_process_json_data = self.update_task_data_on_bpmn_process(
-bpmn_process, bpmn_process_instance=spiff_task.workflow
-)
+bpmn_process_json_data = self.update_task_data_on_bpmn_process(bpmn_process, bpmn_process_instance=spiff_task.workflow)
 if bpmn_process_json_data is not None:
 self.json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data
 self.task_models[task_model.guid] = task_model

@@ -248,18 +240,14 @@ class TaskService:
 new_properties_json["success"] = spiff_workflow.success
 bpmn_process.properties_json = new_properties_json

-bpmn_process_json_data = self.update_task_data_on_bpmn_process(
-bpmn_process, bpmn_process_instance=spiff_workflow
-)
+bpmn_process_json_data = self.update_task_data_on_bpmn_process(bpmn_process, bpmn_process_instance=spiff_workflow)
 if bpmn_process_json_data is not None:
 self.json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data

 self.bpmn_processes[bpmn_process.guid or "top_level"] = bpmn_process

 if spiff_workflow.parent_task_id:
-direct_parent_bpmn_process = BpmnProcessModel.query.filter_by(
-id=bpmn_process.direct_parent_process_id
-).first()
+direct_parent_bpmn_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first()
 self.update_bpmn_process(spiff_workflow.parent_workflow, direct_parent_bpmn_process)

 def update_task_model(

@@ -396,9 +384,7 @@ class TaskService:
 for subprocess_guid in list(subprocesses):
 subprocess = subprocesses[subprocess_guid]
 if subprocess == spiff_workflow.parent_workflow:
-direct_bpmn_process_parent = BpmnProcessModel.query.filter_by(
-guid=str(subprocess_guid)
-).first()
+direct_bpmn_process_parent = BpmnProcessModel.query.filter_by(guid=str(subprocess_guid)).first()
 if direct_bpmn_process_parent is None:
 raise BpmnProcessNotFoundError(
 f"Could not find bpmn process with guid: {str(subprocess_guid)} "

@@ -406,9 +392,7 @@ class TaskService:
 )

 if direct_bpmn_process_parent is None:
-raise BpmnProcessNotFoundError(
-f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}"
-)
+raise BpmnProcessNotFoundError(f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}")

 bpmn_process.direct_parent_process_id = direct_bpmn_process_parent.id


@@ -468,9 +452,7 @@ class TaskService:
 # Remove all the deleted/pruned tasks from the database.
 deleted_task_guids = [str(t.id) for t in deleted_spiff_tasks]
 tasks_to_clear = TaskModel.query.filter(TaskModel.guid.in_(deleted_task_guids)).all()  # type: ignore
-human_tasks_to_clear = HumanTaskModel.query.filter(
-HumanTaskModel.task_id.in_(deleted_task_guids)  # type: ignore
-).all()
+human_tasks_to_clear = HumanTaskModel.query.filter(HumanTaskModel.task_id.in_(deleted_task_guids)).all()  # type: ignore

 # delete human tasks first to avoid potential conflicts when deleting tasks.
 # otherwise sqlalchemy returns several warnings.

@@ -587,25 +569,19 @@ class TaskService:
 return (bpmn_processes, task_models)

 @classmethod
-def full_bpmn_process_path(
-cls, bpmn_process: BpmnProcessModel, definition_column: str = "bpmn_identifier"
-) -> list[str]:
+def full_bpmn_process_path(cls, bpmn_process: BpmnProcessModel, definition_column: str = "bpmn_identifier") -> list[str]:
 """Returns a list of bpmn process identifiers pointing the given bpmn_process."""
 bpmn_process_identifiers: list[str] = []
 if bpmn_process.guid:
 task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first()
 if task_model is None:
-raise TaskNotFoundError(
-f"Cannot find the corresponding task for the bpmn process with guid {bpmn_process.guid}."
-)
+raise TaskNotFoundError(f"Cannot find the corresponding task for the bpmn process with guid {bpmn_process.guid}.")
 (
 parent_bpmn_processes,
 _task_models_of_parent_bpmn_processes,
 ) = TaskService.task_models_of_parent_bpmn_processes(task_model)
 for parent_bpmn_process in parent_bpmn_processes:
-bpmn_process_identifiers.append(
-getattr(parent_bpmn_process.bpmn_process_definition, definition_column)
-)
+bpmn_process_identifiers.append(getattr(parent_bpmn_process.bpmn_process_definition, definition_column))
 bpmn_process_identifiers.append(getattr(bpmn_process.bpmn_process_definition, definition_column))
 return bpmn_process_identifiers


@@ -631,9 +607,7 @@ class TaskService:

 @classmethod
 def is_main_process_end_event(cls, spiff_task: SpiffTask) -> bool:
-return (
-cls.get_task_type_from_spiff_task(spiff_task) == "EndEvent" and spiff_task.workflow.parent_workflow is None
-)
+return cls.get_task_type_from_spiff_task(spiff_task) == "EndEvent" and spiff_task.workflow.parent_workflow is None

 @classmethod
 def bpmn_process_for_called_activity_or_top_level_process(cls, task_model: TaskModel) -> BpmnProcessModel:

@@ -668,16 +642,12 @@ class TaskService:

 @classmethod
 def get_ready_signals_with_button_labels(cls, process_instance_id: int, associated_task_guid: str) -> list[dict]:
-waiting_tasks: list[TaskModel] = TaskModel.query.filter_by(
-state="WAITING", process_instance_id=process_instance_id
-).all()
+waiting_tasks: list[TaskModel] = TaskModel.query.filter_by(state="WAITING", process_instance_id=process_instance_id).all()
 result = []
 for task_model in waiting_tasks:
 task_definition = task_model.task_definition
 extensions: dict = (
-task_definition.properties_json["extensions"]
-if "extensions" in task_definition.properties_json
-else {}
+task_definition.properties_json["extensions"] if "extensions" in task_definition.properties_json else {}
 )
 event_definition: dict = (
 task_definition.properties_json["event_definition"]

@@ -748,9 +718,7 @@ class TaskService:
 spiff_task: SpiffTask,
 bpmn_definition_to_task_definitions_mappings: dict,
 ) -> TaskModel:
-task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][
-spiff_task.task_spec.name
-]
+task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name]
 task_model = TaskModel(
 guid=str(spiff_task.id),
 bpmn_process_id=bpmn_process.id,

@@ -760,9 +728,7 @@ class TaskService:
 return task_model

 @classmethod
-def _get_python_env_data_dict_from_spiff_task(
-cls, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer
-) -> dict:
+def _get_python_env_data_dict_from_spiff_task(cls, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer) -> dict:
 user_defined_state = spiff_task.workflow.script_engine.environment.user_defined_state()
 # this helps to convert items like datetime objects to be json serializable
 converted_data: dict = serializer.registry.convert(user_defined_state)
@@ -154,11 +154,7 @@ class UserService:
 @classmethod
 def apply_waiting_group_assignments(cls, user: UserModel) -> None:
 """Only called from create_user which is normally called at sign-in time"""
-waiting = (
-UserGroupAssignmentWaitingModel()
-.query.filter(UserGroupAssignmentWaitingModel.username == user.username)
-.all()
-)
+waiting = UserGroupAssignmentWaitingModel().query.filter(UserGroupAssignmentWaitingModel.username == user.username).all()
 for assignment in waiting:
 cls.add_user_to_group(user, assignment.group)
 db.session.delete(assignment)

@@ -174,9 +170,7 @@ class UserService:

 @staticmethod
 def get_user_by_service_and_service_id(service: str, service_id: str) -> UserModel | None:
-user: UserModel = (
-UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
-)
+user: UserModel = UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
 if user:
 return user
 return None

@@ -184,9 +178,7 @@ class UserService:
 @classmethod
 def add_user_to_human_tasks_if_appropriate(cls, user: UserModel) -> None:
 group_ids = [g.id for g in user.groups]
-human_tasks = HumanTaskModel.query.filter(
-HumanTaskModel.lane_assignment_id.in_(group_ids)  # type: ignore
-).all()
+human_tasks = HumanTaskModel.query.filter(HumanTaskModel.lane_assignment_id.in_(group_ids)).all()  # type: ignore
 for human_task in human_tasks:
 human_task_user = HumanTaskUserModel(user_id=user.id, human_task_id=human_task.id)
 db.session.add(human_task_user)

@@ -272,9 +264,7 @@ class UserService:
 db.session.commit()

 @classmethod
-def find_or_create_guest_user(
-cls, username: str = SPIFF_GUEST_USER, group_identifier: str = SPIFF_GUEST_GROUP
-) -> UserModel:
+def find_or_create_guest_user(cls, username: str = SPIFF_GUEST_USER, group_identifier: str = SPIFF_GUEST_GROUP) -> UserModel:
 guest_user: UserModel | None = UserModel.query.filter_by(
 username=username, service="spiff_guest_service", service_id="spiff_guest_service_id"
 ).first()
@@ -4,6 +4,7 @@ import concurrent.futures
 import time
 from abc import abstractmethod
 from collections.abc import Callable
+from datetime import datetime
 from typing import Any
 from uuid import UUID

@@ -13,18 +14,26 @@ from flask import g
 from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException  # type: ignore
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition  # type: ignore
+from SpiffWorkflow.bpmn.specs.mixins.events.event_types import CatchingEvent  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.util.task import TaskState  # type: ignore

+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_future_task_if_appropriate,
+)
 from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.future_task import FutureTaskModel
 from spiffworkflow_backend.models.message_instance import MessageInstanceModel
 from spiffworkflow_backend.models.message_instance_correlation import MessageInstanceCorrelationRuleModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
+from spiffworkflow_backend.models.task_instructions_for_end_user import TaskInstructionsForEndUserModel
 from spiffworkflow_backend.services.assertion_service import safe_assertion
+from spiffworkflow_backend.services.jinja_service import JinjaService
 from spiffworkflow_backend.services.process_instance_lock_service import ProcessInstanceLockService
 from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
 from spiffworkflow_backend.services.task_service import StartAndEndTimes
@@ -51,6 +60,12 @@ class ExecutionStrategyNotConfiguredError(Exception):
     pass


+class TaskRunnability(SpiffEnum):
+    has_ready_tasks = "has_ready_tasks"
+    no_ready_tasks = "no_ready_tasks"
+    unknown_if_ready_tasks = "unknown_if_ready_tasks"
+
+
 class EngineStepDelegate:
     """Interface of sorts for a concrete engine step delegate."""

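The TaskRunnability enum added above is what spiff_run and run_and_save now report back to callers, so the background processor can decide whether another engine pass is worth scheduling without re-inspecting the workflow. A minimal sketch of how a caller might branch on it; the maybe_requeue helper and its requeue callback are hypothetical and not part of this changeset:

    from collections.abc import Callable

    def maybe_requeue(task_runnability: TaskRunnability, requeue: Callable[[], None]) -> None:
        # has_ready_tasks: more engine steps are known to be runnable right now
        # unknown_if_ready_tasks: cheaper to run again than to re-check the workflow state
        if task_runnability in (TaskRunnability.has_ready_tasks, TaskRunnability.unknown_if_ready_tasks):
            requeue()
        # no_ready_tasks: wait for an external trigger (timer, message, or human task)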
@@ -81,43 +96,44 @@ SubprocessSpecLoader = Callable[[], dict[str, Any] | None]
 class ExecutionStrategy:
     """Interface of sorts for a concrete execution strategy."""

-    def __init__(
-        self, delegate: EngineStepDelegate, subprocess_spec_loader: SubprocessSpecLoader, options: dict | None = None
-    ):
+    def __init__(self, delegate: EngineStepDelegate, subprocess_spec_loader: SubprocessSpecLoader, options: dict | None = None):
         self.delegate = delegate
         self.subprocess_spec_loader = subprocess_spec_loader
         self.options = options

-    def should_break_before(self, tasks: list[SpiffTask]) -> bool:
+    def should_break_before(self, tasks: list[SpiffTask], process_instance_model: ProcessInstanceModel) -> bool:
         return False

     def should_break_after(self, tasks: list[SpiffTask]) -> bool:
         return False

+    def should_do_before(self, bpmn_process_instance: BpmnWorkflow, process_instance_model: ProcessInstanceModel) -> None:
+        pass
+
     def _run(
         self,
         spiff_task: SpiffTask,
         app: flask.app.Flask,
-        process_instance_id: Any | None,
-        process_model_identifier: Any | None,
         user: Any | None,
     ) -> SpiffTask:
         with app.app_context():
-            app.config["THREAD_LOCAL_DATA"].process_instance_id = process_instance_id
-            app.config["THREAD_LOCAL_DATA"].process_model_identifier = process_model_identifier
            g.user = user
            spiff_task.run()
            return spiff_task

-    def spiff_run(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None:
-        # Note
+    def spiff_run(
+        self, bpmn_process_instance: BpmnWorkflow, process_instance_model: ProcessInstanceModel, exit_at: None = None
+    ) -> TaskRunnability:
         while True:
             bpmn_process_instance.refresh_waiting_tasks()
+            self.should_do_before(bpmn_process_instance, process_instance_model)
             engine_steps = self.get_ready_engine_steps(bpmn_process_instance)
-            if self.should_break_before(engine_steps):
-                break
             num_steps = len(engine_steps)
+            if self.should_break_before(engine_steps, process_instance_model=process_instance_model):
+                task_runnability = TaskRunnability.has_ready_tasks if num_steps > 0 else TaskRunnability.no_ready_tasks
+                break
             if num_steps == 0:
+                task_runnability = TaskRunnability.no_ready_tasks
                 break
             elif num_steps == 1:
                 spiff_task = engine_steps[0]
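With should_break_before, should_break_after, and the new should_do_before hook all receiving the ProcessInstanceModel, a custom strategy only has to override the hooks it cares about. A small illustrative subclass written against the signatures above, not part of this changeset:

    class StopOnAnyReadyTaskExecutionStrategy(ExecutionStrategy):
        # Illustrative only: bail out of spiff_run as soon as any engine step is ready.
        def should_break_before(self, tasks: list[SpiffTask], process_instance_model: ProcessInstanceModel) -> bool:
            return len(tasks) > 0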
@@ -130,11 +146,6 @@ class ExecutionStrategy:
                 # service tasks at once - many api calls, and then get those responses back without
                 # waiting for each individual task to complete.
                 futures = []
-                process_instance_id = None
-                process_model_identifier = None
-                if hasattr(current_app.config["THREAD_LOCAL_DATA"], "process_instance_id"):
-                    process_instance_id = current_app.config["THREAD_LOCAL_DATA"].process_instance_id
-                    process_model_identifier = current_app.config["THREAD_LOCAL_DATA"].process_model_identifier
                 user = None
                 if hasattr(g, "user"):
                     user = g.user
@@ -147,8 +158,6 @@ class ExecutionStrategy:
                                 self._run,
                                 spiff_task,
                                 current_app._get_current_object(),
-                                process_instance_id,
-                                process_model_identifier,
                                 user,
                             )
                         )
@@ -156,9 +165,12 @@ class ExecutionStrategy:
                         spiff_task = future.result()
                         self.delegate.did_complete_task(spiff_task)
             if self.should_break_after(engine_steps):
+                # we could call the stuff at the top of the loop again and find out, but let's not do that unless we need to
+                task_runnability = TaskRunnability.unknown_if_ready_tasks
                 break

         self.delegate.after_engine_steps(bpmn_process_instance)
+        return task_runnability

     def on_exception(self, bpmn_process_instance: BpmnWorkflow) -> None:
         self.delegate.on_exception(bpmn_process_instance)
@@ -286,6 +298,43 @@ class GreedyExecutionStrategy(ExecutionStrategy):
     pass


+class QueueInstructionsForEndUserExecutionStrategy(ExecutionStrategy):
+    """When you want to run tasks with instructionsForEndUser but you want to queue them.
+
+    The queue can be used to display the instructions to user later.
+    """
+
+    def __init__(self, delegate: EngineStepDelegate, subprocess_spec_loader: SubprocessSpecLoader, options: dict | None = None):
+        super().__init__(delegate, subprocess_spec_loader, options)
+        self.tasks_that_have_been_seen: set[str] = set()
+
+    def should_do_before(self, bpmn_process_instance: BpmnWorkflow, process_instance_model: ProcessInstanceModel) -> None:
+        tasks = bpmn_process_instance.get_tasks(state=TaskState.WAITING | TaskState.READY)
+        for spiff_task in tasks:
+            if hasattr(spiff_task.task_spec, "extensions") and spiff_task.task_spec.extensions.get(
+                "instructionsForEndUser", None
+            ):
+                task_guid = str(spiff_task.id)
+                if task_guid in self.tasks_that_have_been_seen:
+                    continue
+                instruction = JinjaService.render_instructions_for_end_user(spiff_task)
+                if instruction != "":
+                    TaskInstructionsForEndUserModel.insert_or_update_record(
+                        task_guid=str(spiff_task.id),
+                        process_instance_id=process_instance_model.id,
+                        instruction=instruction,
+                    )
+                self.tasks_that_have_been_seen.add(str(spiff_task.id))
+
+    def should_break_before(self, tasks: list[SpiffTask], process_instance_model: ProcessInstanceModel) -> bool:
+        for spiff_task in tasks:
+            if hasattr(spiff_task.task_spec, "extensions") and spiff_task.task_spec.extensions.get(
+                "instructionsForEndUser", None
+            ):
+                return True
+        return False
+
+
 class RunUntilUserTaskOrMessageExecutionStrategy(ExecutionStrategy):
     """When you want to run tasks until you hit something to report to the end user.

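QueueInstructionsForEndUserExecutionStrategy renders each task's instructionsForEndUser extension once (tasks_that_have_been_seen prevents re-rendering on later passes) and stores the result through TaskInstructionsForEndUserModel.insert_or_update_record. A rough sketch of reading the queued rows back for display; the filter_by query is an assumption based on the keyword arguments used above, not an API shown in this diff:

    def queued_instructions_for(process_instance_id: int) -> list[str]:
        # Illustrative only: collect the stored end-user instructions for one process instance.
        records = TaskInstructionsForEndUserModel.query.filter_by(process_instance_id=process_instance_id).all()
        return [record.instruction for record in records]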
@@ -293,9 +342,11 @@ class RunUntilUserTaskOrMessageExecutionStrategy(ExecutionStrategy):
     but will stop if it hits instructions after the first task.
     """

-    def should_break_before(self, tasks: list[SpiffTask]) -> bool:
-        for task in tasks:
-            if hasattr(task.task_spec, "extensions") and task.task_spec.extensions.get("instructionsForEndUser", None):
+    def should_break_before(self, tasks: list[SpiffTask], process_instance_model: ProcessInstanceModel) -> bool:
+        for spiff_task in tasks:
+            if hasattr(spiff_task.task_spec, "extensions") and spiff_task.task_spec.extensions.get(
+                "instructionsForEndUser", None
+            ):
                 return True
         return False

@@ -310,8 +361,11 @@ class RunCurrentReadyTasksExecutionStrategy(ExecutionStrategy):
 class SkipOneExecutionStrategy(ExecutionStrategy):
     """When you want to skip over the next task, rather than execute it."""

-    def spiff_run(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None:
+    def spiff_run(
+        self, bpmn_process_instance: BpmnWorkflow, process_instance_model: ProcessInstanceModel, exit_at: None = None
+    ) -> TaskRunnability:
         spiff_task = None
+        engine_steps = []
         if self.options and "spiff_task" in self.options.keys():
             spiff_task = self.options["spiff_task"]
         else:
@@ -323,13 +377,15 @@ class SkipOneExecutionStrategy(ExecutionStrategy):
             spiff_task.complete()
         self.delegate.did_complete_task(spiff_task)
         self.delegate.after_engine_steps(bpmn_process_instance)
+        # even if there was just 1 engine_step, and we ran it, we can't know that there is not another one
+        # that resulted from running that one, hence the unknown_if_ready_tasks
+        return TaskRunnability.has_ready_tasks if len(engine_steps) > 1 else TaskRunnability.unknown_if_ready_tasks


-def execution_strategy_named(
-    name: str, delegate: EngineStepDelegate, spec_loader: SubprocessSpecLoader
-) -> ExecutionStrategy:
+def execution_strategy_named(name: str, delegate: EngineStepDelegate, spec_loader: SubprocessSpecLoader) -> ExecutionStrategy:
     cls = {
         "greedy": GreedyExecutionStrategy,
+        "queue_instructions_for_end_user": QueueInstructionsForEndUserExecutionStrategy,
         "run_until_user_message": RunUntilUserTaskOrMessageExecutionStrategy,
         "run_current_ready_tasks": RunCurrentReadyTasksExecutionStrategy,
         "skip_one": SkipOneExecutionStrategy,
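Because the new strategy is registered in the lookup table above, it can be selected purely by name. A minimal sketch; delegate, spec_loader, bpmn_process_instance, and process_instance_model stand in for whatever the processor already supplies:

    # Illustrative only: look up the new strategy by its registered name and run it.
    strategy = execution_strategy_named("queue_instructions_for_end_user", delegate, spec_loader)
    task_runnability = strategy.spiff_run(bpmn_process_instance, process_instance_model)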
@@ -352,21 +408,27 @@ class WorkflowExecutionService:
         execution_strategy: ExecutionStrategy,
         process_instance_completer: ProcessInstanceCompleter,
         process_instance_saver: ProcessInstanceSaver,
+        additional_processing_identifier: str | None = None,
     ):
         self.bpmn_process_instance = bpmn_process_instance
         self.process_instance_model = process_instance_model
         self.execution_strategy = execution_strategy
         self.process_instance_completer = process_instance_completer
         self.process_instance_saver = process_instance_saver
+        self.additional_processing_identifier = additional_processing_identifier

     # names of methods that do spiff stuff:
     # processor.do_engine_steps calls:
     #   run
     #     execution_strategy.spiff_run
     #       spiff.[some_run_task_method]
-    def run_and_save(self, exit_at: None = None, save: bool = False) -> None:
+    def run_and_save(self, exit_at: None = None, save: bool = False) -> TaskRunnability:
         if self.process_instance_model.persistence_level != "none":
-            with safe_assertion(ProcessInstanceLockService.has_lock(self.process_instance_model.id)) as tripped:
+            with safe_assertion(
+                ProcessInstanceLockService.has_lock(
+                    self.process_instance_model.id, additional_processing_identifier=self.additional_processing_identifier
+                )
+            ) as tripped:
                 if tripped:
                     raise AssertionError(
                         "The current thread has not obtained a lock for this process"
@@ -376,13 +438,17 @@ class WorkflowExecutionService:
             self.bpmn_process_instance.refresh_waiting_tasks()

             # TODO: implicit re-entrant locks here `with_dequeued`
-            self.execution_strategy.spiff_run(self.bpmn_process_instance, exit_at)
+            task_runnability = self.execution_strategy.spiff_run(
+                self.bpmn_process_instance, exit_at=exit_at, process_instance_model=self.process_instance_model
+            )

             if self.bpmn_process_instance.is_completed():
                 self.process_instance_completer(self.bpmn_process_instance)

             self.process_bpmn_messages()
             self.queue_waiting_receive_messages()
+            self.schedule_waiting_timer_events()
+            return task_runnability
         except WorkflowTaskException as wte:
             ProcessInstanceTmpService.add_event_to_process_instance(
                 self.process_instance_model,
@@ -398,10 +464,32 @@ class WorkflowExecutionService:

         finally:
             if self.process_instance_model.persistence_level != "none":
+                # even if a task fails, try to persist all tasks, which will include the error state.
                 self.execution_strategy.add_object_to_db_session(self.bpmn_process_instance)
                 if save:
                     self.process_instance_saver()

+    def is_happening_soon(self, time_in_seconds: int) -> bool:
+        # if it is supposed to happen in less than the amount of time we take between polling runs
+        return time_in_seconds - time.time() < int(
+            current_app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_FUTURE_TASK_EXECUTION_INTERVAL_IN_SECONDS"]
+        )
+
+    def schedule_waiting_timer_events(self) -> None:
+        # TODO: update to always insert records so we can remove user_input_required and possibly waiting apscheduler jobs
+        if current_app.config["SPIFFWORKFLOW_BACKEND_CELERY_ENABLED"]:
+            waiting_tasks = self.bpmn_process_instance.get_tasks(state=TaskState.WAITING, spec_class=CatchingEvent)
+            for spiff_task in waiting_tasks:
+                event = spiff_task.task_spec.event_definition.details(spiff_task)
+                if "Time" in event.event_type:
+                    time_string = event.value
+                    run_at_in_seconds = round(datetime.fromisoformat(time_string).timestamp())
+                    FutureTaskModel.insert_or_update(guid=str(spiff_task.id), run_at_in_seconds=run_at_in_seconds)
+                    if self.is_happening_soon(run_at_in_seconds):
+                        queue_future_task_if_appropriate(
+                            self.process_instance_model, eta_in_seconds=run_at_in_seconds, task_guid=str(spiff_task.id)
+                        )
+
     def process_bpmn_messages(self) -> None:
         # FIXE: get_events clears out the events so if we have other events we care about
         # this will clear them out as well.
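is_happening_soon compares the timer's absolute epoch time with now plus the future-task polling interval, so only imminent timers get handed to celery with a countdown while the rest are left to the future_task poller. A worked example with an assumed interval of 30 seconds (the actual config value is deployment-specific):

    import time

    interval_in_seconds = 30  # assumed SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_FUTURE_TASK_EXECUTION_INTERVAL_IN_SECONDS
    now = time.time()

    (now + 10) - now < interval_in_seconds   # True: due in 10 seconds, queue the celery task immediately
    (now + 600) - now < interval_in_seconds  # False: due in 10 minutes, leave it to the future-task poller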
@@ -474,10 +562,12 @@ class WorkflowExecutionService:
 class ProfiledWorkflowExecutionService(WorkflowExecutionService):
     """A profiled version of the workflow execution service."""

-    def run_and_save(self, exit_at: None = None, save: bool = False) -> None:
+    def run_and_save(self, exit_at: None = None, save: bool = False) -> TaskRunnability:
         import cProfile
         from pstats import SortKey

+        task_runnability = TaskRunnability.unknown_if_ready_tasks
         with cProfile.Profile() as pr:
-            super().run_and_save(exit_at=exit_at, save=save)
+            task_runnability = super().run_and_save(exit_at=exit_at, save=save)
         pr.print_stats(sort=SortKey.CUMULATIVE)
+        return task_runnability
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
+  <bpmn:process id="Process_6z10t9v" isExecutable="true">
+    <bpmn:startEvent id="StartEvent_1">
+      <bpmn:outgoing>Flow_0jml23i</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="Flow_0jml23i" sourceRef="StartEvent_1" targetRef="script_one" />
+    <bpmn:endEvent id="Event_16twbu2">
+      <bpmn:incoming>Flow_0xzoduo</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_0ula2mv" sourceRef="script_one" targetRef="script_two" />
+    <bpmn:sequenceFlow id="Flow_0xzoduo" sourceRef="script_two" targetRef="Event_16twbu2" />
+    <bpmn:scriptTask id="script_one" name="script one">
+      <bpmn:extensionElements>
+        <spiffworkflow:instructionsForEndUser>We run script one</spiffworkflow:instructionsForEndUser>
+      </bpmn:extensionElements>
+      <bpmn:incoming>Flow_0jml23i</bpmn:incoming>
+      <bpmn:outgoing>Flow_0ula2mv</bpmn:outgoing>
+      <bpmn:script>a = 1</bpmn:script>
+    </bpmn:scriptTask>
+    <bpmn:scriptTask id="script_two" name="script two">
+      <bpmn:extensionElements>
+        <spiffworkflow:instructionsForEndUser>We run script two</spiffworkflow:instructionsForEndUser>
+      </bpmn:extensionElements>
+      <bpmn:incoming>Flow_0ula2mv</bpmn:incoming>
+      <bpmn:outgoing>Flow_0xzoduo</bpmn:outgoing>
+    </bpmn:scriptTask>
+  </bpmn:process>
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_6z10t9v">
+      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
+        <dc:Bounds x="179" y="159" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_16twbu2_di" bpmnElement="Event_16twbu2">
+        <dc:Bounds x="522" y="159" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Activity_0ozwt2o_di" bpmnElement="script_one">
+        <dc:Bounds x="270" y="137" width="100" height="80" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Activity_0vv06b3_di" bpmnElement="script_two">
+        <dc:Bounds x="400" y="137" width="100" height="80" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_0jml23i_di" bpmnElement="Flow_0jml23i">
+        <di:waypoint x="215" y="177" />
+        <di:waypoint x="270" y="177" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_0ula2mv_di" bpmnElement="Flow_0ula2mv">
+        <di:waypoint x="370" y="177" />
+        <di:waypoint x="400" y="177" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_0xzoduo_di" bpmnElement="Flow_0xzoduo">
+        <di:waypoint x="500" y="177" />
+        <di:waypoint x="522" y="177" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>
@@ -0,0 +1,69 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
+  <bpmn:process id="Process_zaes0vi" isExecutable="true">
+    <bpmn:startEvent id="StartEvent_1">
+      <bpmn:outgoing>Flow_0903e0h</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="Flow_0903e0h" sourceRef="StartEvent_1" targetRef="user_task_one" />
+    <bpmn:endEvent id="user_task_path_end_event" name="User task path end event">
+      <bpmn:incoming>Flow_1yn50r0</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_1yn50r0" sourceRef="user_task_one" targetRef="user_task_path_end_event" />
+    <bpmn:manualTask id="user_task_one" name="User task one">
+      <bpmn:incoming>Flow_0903e0h</bpmn:incoming>
+      <bpmn:outgoing>Flow_1yn50r0</bpmn:outgoing>
+    </bpmn:manualTask>
+    <bpmn:boundaryEvent id="user_task_timer_event" name="User task timer event" attachedToRef="user_task_one">
+      <bpmn:outgoing>Flow_1ky2hak</bpmn:outgoing>
+      <bpmn:timerEventDefinition id="TimerEventDefinition_12rb24v">
+        <bpmn:timeDuration xsi:type="bpmn:tFormalExpression">"PT10M"</bpmn:timeDuration>
+      </bpmn:timerEventDefinition>
+    </bpmn:boundaryEvent>
+    <bpmn:endEvent id="timer_event_path_end_event" name="Timer event path end event">
+      <bpmn:incoming>Flow_1ky2hak</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_1ky2hak" sourceRef="user_task_timer_event" targetRef="timer_event_path_end_event" />
+  </bpmn:process>
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_zaes0vi">
+      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
+        <dc:Bounds x="179" y="159" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0668ivs_di" bpmnElement="user_task_path_end_event">
+        <dc:Bounds x="432" y="159" width="36" height="36" />
+        <bpmndi:BPMNLabel>
+          <dc:Bounds x="416" y="202" width="71" height="27" />
+        </bpmndi:BPMNLabel>
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Activity_0kov31h_di" bpmnElement="user_task_one">
+        <dc:Bounds x="270" y="137" width="100" height="80" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0nz6n0j_di" bpmnElement="timer_event_path_end_event">
+        <dc:Bounds x="402" y="282" width="36" height="36" />
+        <bpmndi:BPMNLabel>
+          <dc:Bounds x="379" y="325" width="83" height="27" />
+        </bpmndi:BPMNLabel>
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_1aw81go_di" bpmnElement="user_task_timer_event">
+        <dc:Bounds x="312" y="199" width="36" height="36" />
+        <bpmndi:BPMNLabel>
+          <dc:Bounds x="293" y="242" width="75" height="27" />
+        </bpmndi:BPMNLabel>
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_0903e0h_di" bpmnElement="Flow_0903e0h">
+        <di:waypoint x="215" y="177" />
+        <di:waypoint x="270" y="177" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1yn50r0_di" bpmnElement="Flow_1yn50r0">
+        <di:waypoint x="370" y="177" />
+        <di:waypoint x="432" y="177" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1ky2hak_di" bpmnElement="Flow_1ky2hak">
+        <di:waypoint x="330" y="235" />
+        <di:waypoint x="330" y="300" />
+        <di:waypoint x="402" y="300" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>
@@ -316,9 +316,7 @@ class BaseTest:
         grant_type: str = "permit",
     ) -> UserModel:
         user = BaseTest.find_or_create_user(username=username)
-        return cls.add_permissions_to_user(
-            user, target_uri=target_uri, permission_names=permission_names, grant_type=grant_type
-        )
+        return cls.add_permissions_to_user(user, target_uri=target_uri, permission_names=permission_names, grant_type=grant_type)

     @classmethod
     def add_permissions_to_user(
@@ -518,9 +516,7 @@ class BaseTest:
             report_metadata=report_metadata,
             user=user,
         )
-        response = self.post_to_process_instance_list(
-            client, user, report_metadata=process_instance_report.get_report_metadata()
-        )
+        response = self.post_to_process_instance_list(client, user, report_metadata=process_instance_report.get_report_metadata())

         if expect_to_find_instance is True:
             assert len(response.json["results"]) == 1
@@ -2,10 +2,10 @@ import ast
 import base64
 import re
 import time
-from typing import Any

 from flask.app import Flask
 from flask.testing import FlaskClient
+from pytest_mock.plugin import MockerFixture
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authentication_service import AuthenticationService
@@ -122,7 +122,7 @@ class TestAuthentication(BaseTest):
     def test_can_login_with_valid_user(
         self,
         app: Flask,
-        mocker: Any,
+        mocker: MockerFixture,
         client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
@@ -87,9 +87,7 @@ class TestForGoodErrors(BaseTest):
         _dequeued_interstitial_stream(process_instance_id)

         """Returns the next available user task for a given process instance, if possible."""
-        human_tasks = (
-            db.session.query(HumanTaskModel).filter(HumanTaskModel.process_instance_id == process_instance_id).all()
-        )
+        human_tasks = db.session.query(HumanTaskModel).filter(HumanTaskModel.process_instance_id == process_instance_id).all()
         assert len(human_tasks) > 0, "No human tasks found for process."
         human_task = human_tasks[0]
         response = client.get(
Some files were not shown because too many files have changed in this diff.