commit abacdf96e6
merged in main and resolved conflicts

@@ -11,6 +11,12 @@ repos:
require_serial: true
# exclude: ^migrations/
exclude: "/migrations/"

# otherwise it will not fix long lines if the long lines contain long strings
# https://github.com/psf/black/pull/1132
# https://github.com/psf/black/pull/1609
args: [--preview]

- id: check-added-large-files
files: ^spiffworkflow-backend/
name: Check for added large files

@@ -0,0 +1,11 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
"${script_dir}/run_pyl" pre

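The error_handler/trap lines in this new hook are the boilerplate every shell script in this commit shares: with errexit and errtrace set, any failing command fires the ERR trap, which reports the script name, line number, and exit code before exiting. A minimal standalone sketch of the same pattern (the trailing `false` is only there to demonstrate the trap firing):

#!/usr/bin/env bash
function error_handler() {
  # $1 is the line number from $LINENO, $2 is the exit code from $?
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
false  # demonstration only: trips the ERR trap and prints this line's number
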
bin/run_pyl
@@ -16,6 +16,17 @@ react_projects=(
spiffworkflow-frontend
)

subcommand="${1:-}"

if [[ "$subcommand" == "pre" ]]; then
if [[ -n "$(git status --porcelain SpiffWorkflow)" ]]; then
echo "SpiffWorkflow has uncommitted changes. Running its test suite."
pushd SpiffWorkflow
make tests-par # run tests in parallel
popd
fi
fi

function get_python_dirs() {
(git ls-tree -r HEAD --name-only | grep -E '\.py$' | awk -F '/' '{print $1}' | sort | uniq | grep -v '\.' | grep -Ev '^(bin|migrations)$') || echo ''
}

@@ -50,23 +61,34 @@ function run_pre_commmit() {
}

for react_project in "${react_projects[@]}" ; do
pushd "$react_project"
npm run lint:fix
popd
# if pre, only do stuff when there are changes
if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$react_project")" ]]; then
pushd "$react_project"
npm run lint:fix
popd
fi
done

for python_project in "${python_projects[@]}" ; do
pushd "$python_project"
run_fix_docstrings || run_fix_docstrings
run_autoflake || run_autoflake
popd
if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
pushd "$python_project"
run_fix_docstrings || run_fix_docstrings
run_autoflake || run_autoflake
popd
fi
done
run_pre_commmit || run_pre_commmit

for python_project in "${python_projects[@]}"; do
pushd "$python_project"
poetry install
poetry run mypy $(get_python_dirs)
poetry run coverage run --parallel -m pytest
popd
if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "spiffworkflow-backend")" ]]; then
# rune_pre_commit only applies to spiffworkflow-backend at the moment
run_pre_commmit || run_pre_commmit
fi

for python_project in "${python_projects[@]}"; do
if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
pushd "$python_project"
poetry install
poetry run mypy $(get_python_dirs)
poetry run coverage run --parallel -m pytest
popd
fi
done

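The repeated `run_fix_docstrings || run_fix_docstrings` and `run_pre_commmit || run_pre_commmit` calls above are a retry idiom: auto-fixers commonly exit nonzero on the run that rewrites files, so under `set -o errexit` the command is run a second time, and only a second failure (a problem the fixer could not fix) aborts the script. The same idiom in isolation, assuming a hypothetical `my_fixer` command that exits 1 whenever it changes a file:

# first run applies fixes (and may "fail"); second run must pass clean,
# otherwise errexit stops the script with a real error
my_fixer || my_fixer
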
@@ -10,9 +10,9 @@ services:
environment:
- MYSQL_DATABASE=spiffworkflow_backend_development
- MYSQL_ROOT_PASSWORD=my-secret-pw
- MYSQL_TCP_PORT=7003
- MYSQL_TCP_PORT=8003
ports:
- "7003"
- "8003"
healthcheck:
test: mysql --user=root --password=my-secret-pw -e 'select 1' spiffworkflow_backend_development
interval: 10s

@@ -30,12 +30,12 @@ services:
- SPIFFWORKFLOW_BACKEND_ENV=development
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=super_secret_key
- OPEN_ID_SERVER_URL=http://localhost:7000/openid
- SPIFFWORKFLOW_FRONTEND_URL=http://localhost:7001
- SPIFFWORKFLOW_BACKEND_URL=http://localhost:7000
- SPIFFWORKFLOW_BACKEND_PORT=7000
- OPEN_ID_SERVER_URL=http://localhost:8000/openid
- SPIFFWORKFLOW_FRONTEND_URL=http://localhost:8001
- SPIFFWORKFLOW_BACKEND_URL=http://localhost:8000
- SPIFFWORKFLOW_BACKEND_PORT=8000
- SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
- SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:7003/spiffworkflow_backend_development
- SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:8003/spiffworkflow_backend_development
- BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
- SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=false
- SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=example.yml

@@ -43,12 +43,12 @@ services:
- OPEN_ID_CLIENT_ID=spiffworkflow-backend
- OPEN_ID_CLIENT_SECRET_KEY=my_open_id_secret_key
ports:
- "7000:7000"
- "8000:8000"
volumes:
- ./process_models:/app/process_models
- ./log:/app/log
healthcheck:
test: curl localhost:7000/v1.0/status --fail
test: curl localhost:8000/v1.0/status --fail
interval: 10s
timeout: 5s
retries: 20

@@ -58,9 +58,9 @@ services:
image: ghcr.io/sartography/spiffworkflow-frontend
environment:
- APPLICATION_ROOT=/
- PORT0=7001
- PORT0=8001
ports:
- "7001:7001"
- "8001:8001"

spiffworkflow-connector:
container_name: spiffworkflow-connector

@@ -69,10 +69,11 @@ services:
- FLASK_ENV=${FLASK_ENV:-development}
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
- CONNECTOR_PROXY_PORT=8004
ports:
- "7004:7004"
- "8004:8004"
healthcheck:
test: curl localhost:7004/liveness --fail
test: curl localhost:8004/liveness --fail
interval: 10s
timeout: 5s
retries: 20

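These hunks renumber every service from the 7xxx port range to 8xxx (mysql 7003 to 8003, backend 7000 to 8000, frontend 7001 to 8001, connector 7004 to 8004), touching the ports, the healthcheck URLs, and the database URI together. After a sweep like this it is easy to miss a stale reference; a hedged way to look for stragglers, assuming GNU grep and that the compose file and scripts live where this repo keeps them:

# list any remaining references to the old 7xxx ports
grep -rnE 'localhost:700[0-9]|"700[0-9]' docker-compose.yml bin/
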
@@ -813,22 +813,6 @@ category = "main"
optional = false
python-versions = ">=3.6"

[[package]]
name = "libcst"
version = "0.4.3"
description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
pyyaml = ">=5.2"
typing-extensions = ">=3.7.4.2"
typing-inspect = ">=0.4.0"

[package.extras]
dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"]

[[package]]
name = "livereload"
version = "2.6.3"

@@ -905,18 +889,6 @@ category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "monkeytype"
version = "22.2.0"
description = "Generating type annotations from sampled production types"
category = "dev"
optional = false
python-versions = ">=3.6"

[package.dependencies]
libcst = ">=0.3.7"
mypy-extensions = "*"

[[package]]
name = "mypy"
version = "0.991"

@@ -1504,7 +1476,7 @@ test = ["pytest"]
[[package]]
name = "SpiffWorkflow"
version = "1.2.1"
description = ""
description = "A workflow framework and BPMN/DMN Processor"
category = "main"
optional = false
python-versions = "*"

@@ -1520,7 +1492,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "025bc30f27366e06dd1286b7563e4b1cb04c1c46"
resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994"

[[package]]
name = "sqlalchemy"

@@ -1627,18 +1599,6 @@ category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "typing-inspect"
version = "0.7.1"
description = "Runtime inspection utilities for typing module."
category = "dev"
optional = false
python-versions = "*"

[package.dependencies]
mypy-extensions = ">=0.3.0"
typing-extensions = ">=3.7.4"

[[package]]
name = "unidecode"
version = "1.3.4"

@@ -1770,7 +1730,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "6dfda037ebb3024834a45670108756a3057fff1b6fb5b916d222d3a162509b7d"
content-hash = "45cac5741fa47e44710f5aae6dfdb4636fc4d60df2d6aba467052fdd5199e791"

[metadata.files]
alabaster = [

@@ -2234,32 +2194,6 @@ lazy-object-proxy = [
{file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"},
{file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"},
]
libcst = [
{file = "libcst-0.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bea98a8be2b1725784ae01e89519121eba7d81280dcbee40ae03ececd7277cf3"},
{file = "libcst-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d9191c764645dddf94d49885e590433fa0ee6d347b07eec86566786e6d2ada5"},
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f22e9787e44304e7cd9744e543602ab2c1bca8b922cb6237ea08d9a0be3fdd"},
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff147dd77b6ea72e4f2f0abfcd1be11a3108c28cb65e6da666c0b77142033f7c"},
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d744d4a6301c75322f1d88365dccfe402a51e724583a2edc4cba474462cc9419"},
{file = "libcst-0.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:ed0f15545eddfdd6270069ce0b2d4c253298817bd676a1a6adddaa1d66c7e28b"},
{file = "libcst-0.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6f57056a743853c01bbd21bfd96c2a1b4c317bbc66920f5f2c9999b3dca7233"},
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3d33da8f9b088e118bfc6ecacdd627ac237baeb490f4d7a383af4df4ea4f82"},
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df5f51a837fc10cdbf5c61acb467f6c15d5f9ca1d94a84a6a29c4f20ce7b437e"},
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f744f60057c8998b856d9baf28765c65574992f4a49830ca350010fc31f4eac4"},
{file = "libcst-0.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:88ab371aab82f7241448e263ec42abced649a77cdd21df960268e6df70b3f3f7"},
{file = "libcst-0.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:826ea5f10a84625db861ccf35946317f4f29e575261e44c0cd6c24c4dde5c2bb"},
{file = "libcst-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab5b23796ce66303398bb7b2d27bcb17d2416dacd3d00229c961aed87d79a3b"},
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afc793c95af79e5adc5905713ccddff034d0de3e3da748424b722edf890227de"},
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c982387b8e23ad18efbd0287004924931a0b05c91ed5630453faf224bb0b185"},
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4c25aca45df5f86a6a1c8c219e8c7a90acdaef02b53eb01eafa563381cb0ce"},
{file = "libcst-0.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1a395129ecf6c6ce429427f34100ccd99f35898a98187764a4559d9f92166cd0"},
{file = "libcst-0.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ca00819affafccb02b2582ec47706712b995c9887cad02bb8efe94a066830f37"},
{file = "libcst-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:231a9ca446570f9b63d8c2c6dbf6c796fb939a5e4ef9dc0dd9304a21a6c0da16"},
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b08e7a56950479c856183ad6fdf0a21df028d6732e1d19822ec1593e32f700ca"},
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cb70e7e5118234e75d309fcf04931e20f282f16c80dda464fc1b88ef02e52e4"},
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c8c00b24ab39facff463b18b9abc8df7dd063ae0ce9fe2e78e199c9a8572e37"},
{file = "libcst-0.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:28f35b9a21b2f8982a8ed3f53b1fdbc5435252409d34d061a3229dc4b413b8c7"},
{file = "libcst-0.4.3.tar.gz", hash = "sha256:f79ab61287505d97ed57ead14b78777f48cd6ec5339ca4978987e4c35957a465"},
]
livereload = [
{file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
]

@@ -2389,10 +2323,6 @@ mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
monkeytype = [
{file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"},
{file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
]
mypy = [
{file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
{file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},

@@ -2808,11 +2738,6 @@ typing-extensions = [
{file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
{file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
]
typing-inspect = [
{file = "typing_inspect-0.7.1-py2-none-any.whl", hash = "sha256:b1f56c0783ef0f25fb064a01be6e5407e54cf4a4bf4f3ba3fe51e0bd6dcea9e5"},
{file = "typing_inspect-0.7.1-py3-none-any.whl", hash = "sha256:3cd7d4563e997719a710a3bfe7ffb544c6b72069b6812a02e9b414a8fa3aaa6b"},
{file = "typing_inspect-0.7.1.tar.gz", hash = "sha256:047d4097d9b17f46531bf6f014356111a1b6fb821a24fe7ac909853ca2a782aa"},
]
unidecode = [
{file = "Unidecode-1.3.4-py3-none-any.whl", hash = "sha256:afa04efcdd818a93237574791be9b2817d7077c25a068b00f8cff7baa4e59257"},
{file = "Unidecode-1.3.4.tar.gz", hash = "sha256:8e4352fb93d5a735c788110d2e7ac8e8031eb06ccbfe8d324ab71735015f9342"},

@@ -170,15 +170,21 @@ def set_user_sentry_context() -> None:
def handle_exception(exception: Exception) -> flask.wrappers.Response:
"""Handles unexpected exceptions."""
set_user_sentry_context()
id = capture_exception(exception)

organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG")
project_slug = current_app.config.get("SENTRY_PROJECT_SLUG")
sentry_link = None
if organization_slug and project_slug:
sentry_link = (
f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
)
if not isinstance(exception, ApiError) or exception.error_code != "invalid_token":
id = capture_exception(exception)

if isinstance(exception, ApiError):
current_app.logger.info(
f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}")

organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG")
project_slug = current_app.config.get("SENTRY_PROJECT_SLUG")
if organization_slug and project_slug:
sentry_link = (
f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
)

# !!!NOTE!!!: do this after sentry stuff since calling logger.exception
# seems to break the sentry sdk context where we no longer get back

@@ -163,7 +163,7 @@ python-versions = "*"

[[package]]
name = "black"
version = "22.10.0"
version = "23.1a1"
description = "The uncompromising code formatter."
category = "dev"
optional = false

@@ -614,7 +614,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"

[[package]]
name = "flask-cors"

@@ -1760,7 +1760,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4"
resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"

[[package]]
name = "sqlalchemy"

@@ -2182,27 +2182,18 @@ billiard = [
{file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"},
]
black = [
{file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
{file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
{file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
{file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
{file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
{file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
{file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
{file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
{file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
{file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
{file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
{file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
{file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
{file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
{file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
{file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
{file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
{file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
{file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
{file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
{file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
{file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"},
{file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"},
{file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"},
{file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"},
{file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"},
{file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"},
{file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"},
{file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"},
{file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"},
{file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"},
{file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"},
{file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"},
]
blinker = [
{file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"},

@@ -2857,7 +2848,18 @@ psycopg2 = [
{file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
]
pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [

@@ -1,7 +1,7 @@
.mypy_cache/
/.idea/
/.coverage
/.coverage.*
.coverage.*
/.nox/
/.python-version
/.pytype/

@@ -9,7 +9,7 @@ set -o errtrace -o errexit -o nounset -o pipefail

if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models"
fi

if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then

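The `script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"` line resolves the absolute directory containing the script itself, so a relative default like the `sample-process-models` path works no matter what the caller's working directory is; the extra `../` in the new default simply reflects the script sitting one level deeper in the tree. The pattern on its own:

#!/usr/bin/env bash
# resolve the absolute, symlink-free directory of this script,
# independent of the caller's current working directory
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
echo "running from: $script_dir"
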
@@ -7,7 +7,8 @@ def main() -> None:
"""Main."""
app = get_hacked_up_app_for_script()
with app.app_context():
    AuthorizationService.delete_all_permissions_and_recreate()
    AuthorizationService.delete_all_permissions()
    AuthorizationService.import_permissions_from_yaml_file()


if __name__ == "__main__":

@@ -1,5 +1,4 @@
"""Get the bpmn process json for a given process instance id and store it in /tmp."""
#!/usr/bin/env python
import os
import sys

@@ -18,15 +17,17 @@ def main(process_instance_id: str):
    id=process_instance_id
).first()

file_path = f"/tmp/{process_instance_id}_bpmn_json.json"
if not process_instance:
    raise Exception(
        f"Could not find a process instance with id: {process_instance_id}"
    )

with open(
    f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8"
    file_path, "w", encoding="utf-8"
) as f:
    f.write(process_instance.bpmn_json)
print(f"Saved to {file_path}")


if len(sys.argv) < 2:

@@ -7,4 +7,5 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

docker compose logs "$@"
# "docker compose logs" is only getting the db logs so specify them both
docker compose logs db spiffworkflow-backend

@@ -0,0 +1,11 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

set -x
mysql -uroot spiffworkflow_backend_development -e 'select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa join principal p on p.id = pa.principal_id join `group` g on g.id = p.group_id join permission_target pt on pt.id = pa.permission_target_id;'

@@ -11,26 +11,42 @@ set -o errtrace -o errexit -o nounset -o pipefail

bpmn_models_absolute_dir="$1"
git_commit_message="$2"
git_commit_username="$3"
git_commit_email="$4"
git_branch="$3"
git_commit_username="$4"
git_commit_email="$5"
git_commit_password="$6"

if [[ -z "${2:-}" ]]; then
  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message]"
if [[ -z "${6:-}" ]]; then
  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]"
  exit 1
fi

cd "$bpmn_models_absolute_dir"
git add .
function failed_to_get_lock() {
  >&2 echo "ERROR: Failed to get lock."
  exit 1
}

function run() {
  cd "$bpmn_models_absolute_dir"
  git add .

  # https://unix.stackexchange.com/a/155077/456630
  if [ -z "$(git status --porcelain)" ]; then
    echo "No changes to commit"
  else
    PAT="${git_commit_username}:${git_commit_password}"
    AUTH=$(echo -n "$PAT" | openssl base64 | tr -d '\n')

# https://unix.stackexchange.com/a/155077/456630
if [ -z "$(git status --porcelain)" ]; then
  echo "No changes to commit"
else
  if [[ -n "$git_commit_username" ]]; then
    git config --local user.name "$git_commit_username"
  fi
  if [[ -n "$git_commit_email" ]]; then
    git config --local user.email "$git_commit_email"
    git config --local http.extraHeader "Authorization: Basic $AUTH"
    git commit -m "$git_commit_message"
    git push --set-upstream origin "$git_branch"
    git config --unset --local http.extraHeader
  fi
  git commit -m "$git_commit_message"
fi
}

exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock
flock --timeout 60 "$lock_fd" || failed_to_get_lock
run
flock -u "$lock_fd"

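Two patterns are worth noting in this rewrite: the commit-and-push work is wrapped in a `run` function guarded by `flock`, so concurrent invocations serialize on `/var/lock/mylockfile` instead of racing inside the repo; and the push authenticates by base64-encoding `username:password` into a temporary `http.extraHeader`, which is unset immediately after the push so the credential does not persist in git config. A minimal sketch of the locking half, assuming a hypothetical `do_work` function as the critical section:

function failed_to_get_lock() {
  >&2 echo "ERROR: Failed to get lock."
  exit 1
}

# open a file descriptor on the lock file, take an exclusive lock
# (waiting up to 60s), run the critical section, then release
exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock
flock --timeout 60 "$lock_fd" || failed_to_get_lock
do_work  # hypothetical critical section
flock -u "$lock_fd"
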
@@ -27,7 +27,6 @@ def main():
"""Main."""
app = get_hacked_up_app_for_script()
with app.app_context():

    process_model_identifier_ticket = "ticket"
    db.session.query(ProcessInstanceModel).filter(
        ProcessInstanceModel.process_model_identifier

@@ -40,7 +40,8 @@ def hello_world():
return (
    'Hello, %s, <a href="/private">See private</a> '
    '<a href="/logout">Log out</a>'
) % oidc.user_getfield("preferred_username")
    % oidc.user_getfield("preferred_username")
)
else:
    return 'Welcome anonymous, <a href="/private">Log in</a>'

@@ -61,3 +61,7 @@ for task in $tasks; do
done

SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
  mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
  FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
fi

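The replacement makes the migration step environment-aware: instead of always upgrading the hardcoded testing database, it only creates and upgrades a database when `SPIFFWORKFLOW_BACKEND_ENV` is set to something other than development or testing. The gate can be exercised on its own; a sketch assuming bash (for the `<<<` herestring) and the same grep -E regex as above:

SPIFFWORKFLOW_BACKEND_ENV=staging  # example value for illustration
if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
  # only non-development, non-testing envs reach this branch
  echo "would create and upgrade spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
fi
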
@@ -426,6 +426,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "admin@spiffworkflow.org",
"credentials" : [ {
"id" : "ef435043-ef0c-407a-af5b-ced13182a408",
"type" : "password",

@@ -446,6 +447,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "alex@sartography.com",
"credentials" : [ {
"id" : "81a61a3b-228d-42b3-b39a-f62d8e7f57ca",
"type" : "password",

@@ -465,6 +467,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "amir@status.im",
"credentials" : [ {
"id" : "e589f3ad-bf7b-4756-89f7-7894c03c2831",
"type" : "password",

@@ -484,6 +487,9 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "ciadmin1@spiffworkflow.org",
"credentials" : [ {
"id" : "111b5ea1-c2ab-470a-a16b-2373bc94de7a",
"type" : "password",

@@ -499,28 +505,6 @@
},
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "56457e8f-47c6-4f9f-a72b-473dea5edfeb",
"createdTimestamp" : 1657139955336,
"username" : "ciuser1",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"credentials" : [ {
"id" : "762f36e9-47af-44da-8520-cf09d752497a",
"type" : "password",
"createdDate" : 1657139966468,
"secretData" : "{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"clientRoles" : {
"spiffworkflow-backend" : [ "uma_protection" ]
},
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "d58b61cc-a77e-488f-a427-05f4e0572e20",
"createdTimestamp" : 1669132945413,

@@ -530,6 +514,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "core@status.im",
"credentials" : [ {
"id" : "ee80092b-8ee6-4699-8492-566e088b48f5",
"type" : "password",

@@ -550,6 +535,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "dan@sartography.com",
"credentials" : [ {
"id" : "d517c520-f500-4542-80e5-7144daef1e32",
"type" : "password",

@@ -569,6 +555,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "daniel@sartography.com",
"credentials" : [ {
"id" : "f240495c-265b-42fc-99db-46928580d07d",
"type" : "password",

@@ -588,6 +575,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "elizabeth@sartography.com",
"credentials" : [ {
"id" : "ae951ec8-9fc9-4f1b-b340-bbbe463ae5c2",
"type" : "password",

@@ -609,6 +597,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "fin@status.im",
"credentials" : [ {
"id" : "2379940c-98b4-481a-b629-0bd1a4e91acf",
"type" : "password",

@@ -631,6 +620,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "fin1@status.im",
"credentials" : [ {
"id" : "96216746-ff72-454e-8288-232428d10b42",
"type" : "password",

@@ -651,6 +641,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "finance_user1@status.im",
"credentials" : [ {
"id" : "f14722ec-13a7-4d35-a4ec-0475d405ae58",
"type" : "password",

@@ -670,6 +661,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "harmeet@status.im",
"credentials" : [ {
"id" : "89c26090-9bd3-46ac-b038-883d02e3f125",
"type" : "password",

@@ -691,6 +683,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "j@status.im",
"credentials" : [ {
"id" : "e71ec785-9133-4b7d-8015-1978379af0bb",
"type" : "password",

@@ -711,6 +704,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jakub@status.im",
"credentials" : [ {
"id" : "ce141fa5-b8d5-4bbe-93e7-22e7119f97c2",
"type" : "password",

@@ -730,6 +724,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jarrad@status.im",
"credentials" : [ {
"id" : "113e0343-1069-476d-83f9-21d98edb9cfa",
"type" : "password",

@@ -749,6 +744,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jason@sartography.com",
"credentials" : [ {
"id" : "40abf32e-f0cc-4a17-8231-1a69a02c1b0b",
"type" : "password",

@@ -768,6 +764,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jon@sartography.com",
"credentials" : [ {
"id" : "8b520e01-5b9b-44ab-9ee8-505bd0831a45",
"type" : "password",

@@ -787,6 +784,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "kb@sartography.com",
"credentials" : [ {
"id" : "2c0be363-038f-48f1-86d6-91fdd28657cf",
"type" : "password",

@@ -808,6 +806,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "lead@status.im",
"credentials" : [ {
"id" : "96e836a4-1a84-45c5-a9ed-651b0c90195e",
"type" : "password",

@@ -830,6 +829,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "lead1@status.im",
"credentials" : [ {
"id" : "4e17388b-6c44-44e1-b20a-a873c0feb9a8",
"type" : "password",

@@ -850,6 +850,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "manuchehr@status.im",
"credentials" : [ {
"id" : "07dabf55-b5d3-4f98-abba-3334086ecf5e",
"type" : "password",

@@ -869,6 +870,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "mike@sartography.com",
"credentials" : [ {
"id" : "1ed375fb-0f1a-4c2a-9243-2477242cf7bd",
"type" : "password",

@@ -887,7 +889,10 @@
"username" : "natalia",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"emailVerified" : true,
"firstName" : "",
"lastName" : "",
"email" : "natalia@sartography.com",
"credentials" : [ {
"id" : "b6aa9936-39cc-4931-bfeb-60e6753de5ba",
"type" : "password",

@@ -907,6 +912,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "sasha@status.im",
"credentials" : [ {
"id" : "4a170af4-6f0c-4e7b-b70c-e674edf619df",
"type" : "password",

@@ -926,6 +932,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "service-account@status.im",
"serviceAccountClientId" : "spiffworkflow-backend",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],

@@ -943,6 +950,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "service-account-withauth@status.im",
"serviceAccountClientId" : "withAuth",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],

@@ -2166,7 +2174,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ]
"allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",

@@ -2184,7 +2192,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",

@@ -2274,7 +2282,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
"id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3",
"id" : "76ae522e-7ab3-48dc-af76-9cb8069368a2",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",

@@ -2296,7 +2304,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "4da99e29-371e-4f4b-a863-e5079f30a714",
"id" : "ddf80243-ec40-4c21-ae94-2967d841f84c",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",

@@ -2325,7 +2333,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "d398c928-e201-4e8b-ab09-289bb351cd2e",
"id" : "4f075680-46b7-49eb-b94c-d7425f105cb9",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",

@@ -2347,7 +2355,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d",
"id" : "a0467c77-c3dc-4df6-acd2-c05ca13601ed",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",

@@ -2369,7 +2377,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "98013bc1-e4dd-41f7-9849-1f898143b944",
"id" : "07536fec-8d41-4c73-845f-ca85002022e0",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",

@@ -2391,7 +2399,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2",
"id" : "f123f912-71fb-4596-97f9-c0628a59413d",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",

@@ -2413,7 +2421,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "2470e6f4-9a01-476a-9057-75d78e577182",
"id" : "03c26cc5-366b-462d-9297-b4016f8d7c57",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",

@@ -2435,7 +2443,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9",
"id" : "1b4f474e-aa64-45cc-90f1-63504585d89c",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",

@@ -2458,7 +2466,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "97c83e43-cba8-4d92-b108-9181bca07a1e",
"id" : "38024dd6-daff-45de-8782-06b07b7bfa56",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",

@@ -2480,7 +2488,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "fbabd64c-20de-4b8c-bfd2-be6822572278",
"id" : "b7e30fca-e4ac-4886-a2e7-642fe2a27ee7",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",

@@ -2516,7 +2524,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "0628a99f-b194-495d-8e54-cc4ca8684956",
"id" : "92e3571d-ac3e-4e79-a391-5315954e866f",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",

@@ -2552,7 +2560,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a",
"id" : "5093dd2d-fe5d-4f41-a54d-03cd648d9b7f",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",

@@ -2581,7 +2589,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa",
"id" : "95d2f1ff-6907-47ce-a93c-db462fe04844",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",

@@ -2596,7 +2604,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9",
"id" : "27405ee8-5730-419c-944c-a7c67edd91ce",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",

@@ -2619,7 +2627,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9",
"id" : "fce6d926-3a99-40ee-b79e-cae84493dbd8",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",

@@ -2641,7 +2649,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a",
"id" : "75d93596-b7fb-4a2c-a780-e6a038e66fe9",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",

@@ -2663,7 +2671,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03",
"id" : "04cdc1ac-c58d-4f8c-bc10-7d5e2bb99485",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",

@@ -2679,7 +2687,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3",
"id" : "99593c1e-f2a5-4198-ad41-634694259110",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",

@@ -2715,7 +2723,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828",
"id" : "7d53f026-b05e-4a9c-aba6-23b17826a4d4",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",

@@ -2751,7 +2759,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "824fe757-cc5c-4e13-ab98-9a2132e10f5c",
"id" : "7ca17e64-f916-4d6c-91f0-815ec66f50e8",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",

@@ -2767,13 +2775,13 @@
} ]
} ],
"authenticatorConfig" : [ {
"id" : "817a93da-29df-447f-ab05-cd9557e66745",
"id" : "9b71d817-b999-479d-97f8-07e39dd9e9fa",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
"id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878",
"id" : "f9f13ba1-6a17-436b-a80b-6ccc042f9fc2",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"

@@ -18,7 +18,19 @@ set -o errtrace -o errexit -o nounset -o pipefail
if ! docker network inspect spiffworkflow > /dev/null 2>&1; then
  docker network create spiffworkflow
fi
docker rm keycloak 2>/dev/null || echo 'no keycloak container found, safe to start new container'

# https://stackoverflow.com/a/60579344/6090676
container_name="keycloak"
if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then
  echo ":: Found container - $container_name"
  if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then
    echo ":: Stopping running container - $container_name"
    docker stop $container_name
  fi
  echo ":: Removing stopped container - $container_name"
  docker rm $container_name
fi

docker run \
  -p 7002:8080 \
  -d \

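The new block replaces a bare `docker rm keycloak` with the existence checks from the linked Stack Overflow answer: `docker ps -qa -f name=...` finds the container whether or not it is running, and `docker ps -q` narrows to running ones, so the script stops before removing and never errors on a missing container. The pattern in isolation, for an assumed container name:

container_name="keycloak"
if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then
  # container exists (running or stopped)
  if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then
    docker stop "$container_name"   # stop it only if it is running
  fi
  docker rm "$container_name"       # then remove the stopped container
fi
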
@@ -9,7 +9,7 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (

@@ -47,7 +47,7 @@ def app() -> Flask:
@pytest.fixture()
def with_db_and_bpmn_file_cleanup() -> None:
    """Process_group_resource."""
    db.session.query(ActiveTaskUserModel).delete()
    db.session.query(HumanTaskUserModel).delete()

    for model in SpiffworkflowBaseDBModel._all_subclasses():
        db.session.query(model).delete()

@@ -68,7 +68,7 @@ services:
- "7000:7000"
network_mode: host
volumes:
- ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
- ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
- ./log:/app/log
healthcheck:
test: curl localhost:7000/v1.0/status --fail

@@ -82,7 +82,7 @@ services:
profiles:
- debug
volumes:
- ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
- ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
- ./:/app
command: /app/bin/boot_in_docker_debug_mode

@@ -1,8 +1,8 @@
"""empty message

Revision ID: 4d75421c0af0
Revision ID: 907bcf0c3d75
Revises:
Create Date: 2022-12-06 17:42:56.417673
Create Date: 2022-12-28 13:52:13.030028

"""
from alembic import op

@@ -10,7 +10,7 @@ import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '4d75421c0af0'
revision = '907bcf0c3d75'
down_revision = None
branch_labels = None
depends_on = None

@@ -72,14 +72,15 @@ def upgrade():
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('uid', sa.String(length=50), nullable=True),
sa.Column('service', sa.String(length=50), nullable=False),
sa.Column('service', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('display_name', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('service', 'service_id', name='service_key'),
sa.UniqueConstraint('uid')
sa.UniqueConstraint('username')
)
op.create_table('message_correlation_property',
sa.Column('id', sa.Integer(), nullable=False),

@@ -174,11 +175,20 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
)
op.create_table('active_task',
op.create_table('user_group_assignment_waiting',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique')
)
op.create_table('human_task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('form_file_name', sa.String(length=50), nullable=True),
sa.Column('ui_form_file_name', sa.String(length=50), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),

@@ -189,12 +199,15 @@ def upgrade():
sa.Column('task_type', sa.String(length=50), nullable=True),
sa.Column('task_status', sa.String(length=50), nullable=True),
sa.Column('process_model_display_name', sa.String(length=255), nullable=True),
sa.Column('completed', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique')
sa.UniqueConstraint('task_id', 'process_instance_id', name='human_task_unique')
)
op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False)
op.create_table('message_correlation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),

@@ -255,23 +268,20 @@ def upgrade():
sa.Column('spiff_step', sa.Integer(), nullable=False),
sa.Column('task_json', sa.JSON(), nullable=False),
sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('active_task_user',
op.create_table('human_task_user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('active_task_id', sa.Integer(), nullable=False),
sa.Column('human_task_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['active_task_id'], ['active_task.id'], ),
sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('active_task_id', 'user_id', name='active_task_user_unique')
sa.UniqueConstraint('human_task_id', 'user_id', name='human_task_user_unique')
)
op.create_index(op.f('ix_active_task_user_active_task_id'), 'active_task_user', ['active_task_id'], unique=False)
op.create_index(op.f('ix_active_task_user_user_id'), 'active_task_user', ['user_id'], unique=False)
op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False)
op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False)
op.create_table('message_correlation_message_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('message_instance_id', sa.Integer(), nullable=False),

@@ -291,9 +301,9 @@ def downgrade():
op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance')
op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance')
op.drop_table('message_correlation_message_instance')
op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user')
op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user')
op.drop_table('active_task_user')
op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user')
op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user')
op.drop_table('human_task_user')
op.drop_table('spiff_step_details')
op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata')
op.drop_table('process_instance_metadata')

@@ -304,7 +314,9 @@ def downgrade():
op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation')
op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation')
op.drop_table('message_correlation')
op.drop_table('active_task')
op.drop_index(op.f('ix_human_task_completed'), table_name='human_task')
op.drop_table('human_task')
op.drop_table('user_group_assignment_waiting')
op.drop_table('user_group_assignment')
op.drop_table('secret')
op.drop_table('refresh_token')

@@ -654,7 +654,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"

[[package]]
name = "Flask-Cors"
@@ -1851,7 +1851,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134"
resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"

[[package]]
name = "SQLAlchemy"

Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -18,11 +18,11 @@ from werkzeug.exceptions import NotFound
import spiffworkflow_backend.load_database_models # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
openid_blueprint,
)
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -93,7 +93,8 @@ def create_app() -> flask.app.Flask:

if os.environ.get("FLASK_SESSION_SECRET_KEY") is None:
raise KeyError(
"Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY"
"Cannot find the secret_key from the environment. Please set"
" FLASK_SESSION_SECRET_KEY"
)

app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY")
@@ -103,7 +104,6 @@ def create_app() -> flask.app.Flask:
migrate.init_app(app, db)

app.register_blueprint(user_blueprint)
app.register_blueprint(process_api_blueprint)
app.register_blueprint(api_error_blueprint)
app.register_blueprint(admin_blueprint, url_prefix="/admin")
app.register_blueprint(openid_blueprint, url_prefix="/openid")
@@ -117,7 +117,7 @@ def create_app() -> flask.app.Flask:
]
CORS(app, origins=origins_re, max_age=3600)

connexion_app.add_api("api.yml", base_path="/v1.0")
connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX)

mail = Mail(app)
app.config["MAIL_APP"] = mail

File diff suppressed because it is too large
@@ -17,21 +17,21 @@ def setup_database_uri(app: Flask) -> None:
if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
app.config["SQLALCHEMY_DATABASE_URI"] = (
f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
)
elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
app.config["SQLALCHEMY_DATABASE_URI"] = (
f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
)
else:
# use pswd to trick flake8 with hardcoded passwords
db_pswd = os.environ.get("DB_PASSWORD")
if db_pswd is None:
db_pswd = ""
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
app.config["SQLALCHEMY_DATABASE_URI"] = (
f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
)
else:
app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_URI"
@@ -42,6 +42,7 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
"""Load_config_file."""
try:
app.config.from_object(env_config_module)
print(f"loaded config: {env_config_module}")
except ImportStringError as exception:
if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
raise ModuleNotFoundError(
@@ -62,6 +63,7 @@ def setup_config(app: Flask) -> None:
)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config.from_object("spiffworkflow_backend.config.default")
print("loaded config: default")

env_config_prefix = "spiffworkflow_backend.config."
if (
@@ -69,6 +71,7 @@ def setup_config(app: Flask) -> None:
and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
):
load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
print("loaded config: terraform_deployed_environment")

env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
load_config_file(app, env_config_module)
@@ -87,6 +90,14 @@ def setup_config(app: Flask) -> None:
"permissions",
app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
)
print(
"set permissions file name config:"
f" {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
)
print(
"set permissions file name full path:"
f" {app.config['PERMISSIONS_FILE_FULLPATH']}"
)

# unversioned (see .gitignore) config that can override everything and include secrets.
# src/spiffworkflow_backend/config/secrets.py

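Editor's note: the setup_database_uri hunk above picks the SQLAlchemy connection string from SPIFF_DATABASE_TYPE, falling back to MySQL. A minimal standalone sketch of that selection logic follows; build_database_uri is a hypothetical helper (not part of this commit), with names and URI shapes taken from the diff.

import os

def build_database_uri(spiff_database_type: str, env_identifier: str, instance_path: str) -> str:
    # Mirrors the branching in setup_database_uri: sqlite, postgres, else MySQL.
    database_name = f"spiffworkflow_backend_{env_identifier}"
    if spiff_database_type == "sqlite":
        return f"sqlite:///{instance_path}/db_{env_identifier}.sqlite3"
    if spiff_database_type == "postgres":
        return f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
    # MySQL default; DB_PASSWORD may legitimately be unset for local dev.
    db_pswd = os.environ.get("DB_PASSWORD") or ""
    return f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"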
@@ -27,8 +27,6 @@ CONNECTOR_PROXY_URL = environ.get(
"CONNECTOR_PROXY_URL", default="http://localhost:7004"
)

GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"

# Open ID server
OPEN_ID_SERVER_URL = environ.get(
"OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow"
@@ -63,7 +61,10 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(

# When a user clicks on the `Publish` button, this is the default branch this server merges into.
# I.e., dev server could have `staging` here. Staging server might have `production` here.
GIT_MERGE_BRANCH = environ.get("GIT_MERGE_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO")
GIT_BRANCH = environ.get("GIT_BRANCH")
GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL")
GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"

# Database Configuration
SPIFF_DATABASE_TYPE = environ.get(

@@ -1,8 +1,9 @@
"""Dev."""
from os import environ

GIT_MERGE_BRANCH = environ.get("GIT_MERGE_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
GIT_USER_EMAIL = environ.get(
"GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml"
@@ -12,3 +12,8 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
RUN_BACKGROUND_SCHEDULER = (
environ.get("RUN_BACKGROUND_SCHEDULER", default="true") == "true"
)
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
"GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
)
GIT_USERNAME = "sartography-automated-committer"
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"

@@ -1,13 +1,10 @@
groups:
admin:
users: [ciadmin1]

common-user:
users: [ciuser1]
users: [ciadmin1@spiffworkflow.org]

permissions:
admin:
groups: [admin, common-user]
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete, list, instantiate]
allowed_permissions: [create, read, update, delete]
uri: /*

@@ -0,0 +1,151 @@
default_group: everybody

groups:
admin:
users:
[
admin@spiffworkflow.org,
jakub@status.im,
jarrad@status.im,
kb@sartography.com,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]

Finance Team:
users:
[
jakub@status.im,
amir@status.im,
jarrad@status.im,
sasha@status.im,
fin@status.im,
fin1@status.im,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]

demo:
users:
[
harmeet@status.im,
sasha@status.im,
manuchehr@status.im,
core@status.im,
fin@status.im,
fin1@status.im,
lead@status.im,
lead1@status.im,
]

test:
users:
[
natalia@sartography.com,
]

permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

# open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-models/*

# basic perms for everybody
read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /processes
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /service-tasks
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /user-groups/for-current-user


finance-admin:
groups: ["Finance Team"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-groups/manage-procurement:procurement:*

manage-revenue-streams-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-procurement:vendor-lifecycle-management:*

manage-revenue-streams-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*

create-test-instances:
groups: ["test"]
users: []
allowed_permissions: [create, read]
uri: /process-instances/misc:test:*

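Editor's note: the permissions files in this commit grant each group a set of allowed_permissions on a URI pattern, where a trailing "*" reads as a prefix wildcard. A rough sketch of how such a pattern could be matched is below; uri_matches is hypothetical and the backend's real matcher may differ.

def uri_matches(pattern: str, target_uri: str) -> bool:
    """Match a permission URI pattern where a trailing '*' is a prefix wildcard."""
    if pattern.endswith("*"):
        return target_uri.startswith(pattern[:-1])
    return pattern == target_uri

# e.g. uri_matches("/process-instances/for-me/*", "/process-instances/for-me/misc:test/33") -> True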
@@ -10,57 +10,60 @@ groups:
admin:
users:
[
admin,
jakub,
kb,
alex,
dan,
mike,
jason,
j,
jarrad,
elizabeth,
jon,
natalia,
admin@spiffworkflow.org,
jakub@status.im,
jarrad@status.im,
kb@sartography.com,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]

Finance Team:
users:
[
jakub,
alex,
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
natalia,
sasha,
fin,
fin1,
jakub@status.im,
amir@status.im,
jarrad@status.im,
sasha@status.im,
fin@status.im,
fin1@status.im,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]

demo:
users:
[
core,
fin,
fin1,
harmeet,
sasha,
manuchehr,
lead,
lead1
harmeet@status.im,
sasha@status.im,
manuchehr@status.im,
core@status.im,
fin@status.im,
fin1@status.im,
lead@status.im,
lead1@status.im,
]

core-contributor:
test:
users:
[
core,
harmeet,
natalia@sartography.com,
]

admin-ro:
users:
[
j@sartography.com,
]

permissions:
@@ -69,135 +72,102 @@ permissions:
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
admin-readonly:
groups: [admin-ro]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
uri: /*
admin-process-instances-for-readonly:
groups: [admin-ro]
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-instances/*


# read all for everybody
# open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-groups/*
uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-models/*
read-all-process-instance:
uri: /process-models/*

# basic perms for everybody
read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/*
uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/reports/*
allowed_permissions: [create, read, update, delete]
uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/processes

task-data-read:
groups: [demo]
uri: /processes
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/task-data/*
uri: /service-tasks
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /user-groups/for-current-user


manage-procurement-admin:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:*
manage-procurement-admin-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement/*
manage-procurement-admin-models:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement:*
manage-procurement-admin-models-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement/*
manage-procurement-admin-instances:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement:*
manage-procurement-admin-instances-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement/*

finance-admin:
groups: ["Finance Team"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:procurement:*
uri: /process-groups/manage-procurement:procurement:*

manage-revenue-streams-instantiate:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-revenue-streams-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*

manage-procurement-invoice-instantiate:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:*
uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*

manage-procurement-instantiate:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:*
uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*

core1-admin-models-instantiate:
groups: ["core-contributor", "Finance Team"]
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/misc:category_number_one:process-model-with-form/process-instances
core1-admin-instances:
groups: ["core-contributor", "Finance Team"]
uri: /process-instances/manage-procurement:vendor-lifecycle-management:*

manage-revenue-streams-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*

create-test-instances:
groups: ["test"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form:*
core1-admin-instances-slash:
groups: ["core-contributor", "Finance Team"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
uri: /process-instances/misc:test:*

@@ -2,14 +2,17 @@ default_group: everybody

users:
admin:
service: local_open_id
email: admin@spiffworkflow.org
password: admin
preferred_username: Admin
nelson:
service: local_open_id
email: nelson@spiffworkflow.org
password: nelson
preferred_username: Nelson
malala:
service: local_open_id
email: malala@spiffworkflow.org
password: malala
preferred_username: Malala
@@ -18,17 +21,17 @@ groups:
admin:
users:
[
admin,
admin@spiffworkflow.org,
]
Education:
users:
[
malala
malala@spiffworkflow.org
]
President:
users:
[
nelson
nelson@spiffworkflow.org
]

permissions:
@@ -44,45 +47,44 @@ permissions:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
uri: /tasks/*

# Everyone can see everything (all groups, and processes are visible)
read-all-process-groups:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-groups/*
uri: /process-groups/*
read-all-process-models:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-models/*
uri: /process-models/*
read-all-process-instance:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-instances/*
uri: /process-instances/*
read-process-instance-reports:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-instances/reports/*
uri: /process-instances/reports/*
processes-read:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/processes

# Members of the Education group can change they processes work.
uri: /processes
# Members of the Education group can change the processes under "education".
education-admin:
groups: ["Education", "President"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/education:*
uri: /process-groups/education:*

# Anyone can start an education process.
education-everybody:
groups: [everybody]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
uri: /process-instances/misc:category_number_one:process-model-with-form/*

@@ -0,0 +1,12 @@
default_group: everybody

groups:
admin:
users: [admin@spiffworkflow.org]

permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

@@ -0,0 +1,148 @@
default_group: everybody

groups:
admin:
users:
[
admin@spiffworkflow.org,
jakub@status.im,
jarrad@status.im,
kb@sartography.com,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]

Finance Team:
users:
[
jakub@status.im,
amir@status.im,
jarrad@status.im,
sasha@status.im,
fin@status.im,
fin1@status.im,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]

demo:
users:
[
harmeet@status.im,
sasha@status.im,
manuchehr@status.im,
core@status.im,
fin@status.im,
fin1@status.im,
lead@status.im,
lead1@status.im,
]
test:
users:
[
natalia@sartography.com,
]

permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [read]
uri: /*
admin-process-instances:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-instances/*

# open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-models/*

# basic perms for everybody
read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /processes
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /service-tasks
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /user-groups/for-current-user

manage-revenue-streams-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-procurement:vendor-lifecycle-management:*

manage-revenue-streams-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*

create-test-instances:
groups: ["test"]
users: []
allowed_permissions: [create, read]
uri: /process-instances/misc:test:*

@@ -2,60 +2,7 @@ default_group: everybody

groups:
admin:
users:
[
admin,
jakub,
kb,
alex,
dan,
mike,
jason,
j,
jarrad,
elizabeth,
jon,
natalia,
]

Finance Team:
users:
[
jakub,
alex,
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
natalia,
sasha,
fin,
fin1,
]

demo:
users:
[
core,
fin,
fin1,
harmeet,
sasha,
manuchehr,
lead,
lead1
]

core-contributor:
users:
[
core,
harmeet,
]
users: [admin@spiffworkflow.org]

permissions:
admin:
@@ -63,120 +10,3 @@ permissions:
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*

service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks


# read all for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-models/*
read-all-process-instance:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/processes

task-data-read:
groups: [demo]
users: []
allowed_permissions: [read]
uri: /v1.0/task-data/*


manage-procurement-admin:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:*
manage-procurement-admin-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement/*
manage-procurement-admin-models:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement:*
manage-procurement-admin-models-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement/*
manage-procurement-admin-instances:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement:*
manage-procurement-admin-instances-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement/*

finance-admin:
groups: ["Finance Team"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:procurement:*

manage-revenue-streams-instantiate:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-revenue-streams-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*

manage-procurement-invoice-instantiate:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-invoice-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*

manage-procurement-instantiate:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:*
manage-procurement-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*

@@ -1,5 +1,12 @@
default_group: everybody

users:
testadmin1:
service: https://testing/openid/thing
email: testadmin1@spiffworkflow.org
password: admin
preferred_username: El administrador de la muerte

groups:
admin:
users: [testadmin1, testadmin2]
@@ -14,7 +21,7 @@ permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete, list, instantiate]
allowed_permissions: [create, read, update, delete]
uri: /*

read-all:
@@ -27,29 +34,29 @@ permissions:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
uri: /tasks/*

# TODO: all uris should really have the same structure
finance-admin-group:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/finance/*
uri: /process-groups/finance/*

finance-admin-model:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/finance/*
uri: /process-models/finance/*

finance-admin-model-lanes:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/finance:model_with_lanes/*
uri: /process-models/finance:model_with_lanes/*

finance-admin-instance-run:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*
uri: /process-instances/*

@@ -0,0 +1,11 @@
"""Qa1."""
from os import environ

GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="qa2")
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
GIT_USER_EMAIL = environ.get(
"GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
)
@@ -0,0 +1,7 @@
"""Staging."""
from os import environ

GIT_BRANCH = environ.get("GIT_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"
@@ -5,8 +5,8 @@ from os import environ
environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]

GIT_COMMIT_ON_SAVE = True
GIT_USERNAME = environment_identifier_for_this_config_file_only
GIT_USER_EMAIL = f"{environment_identifier_for_this_config_file_only}@example.com"
GIT_USERNAME = "sartography-automated-committer"
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
default="terraform_deployed_environment.yml",
@@ -24,3 +24,6 @@ SPIFFWORKFLOW_BACKEND_URL = (
f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
)
CONNECTOR_PROXY_URL = f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
"GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
)

@@ -15,6 +15,7 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
)
GIT_COMMIT_ON_SAVE = False

# NOTE: set this here since nox shoves tests and src code to
# different places and this allows us to know exactly where we are at the start

@@ -0,0 +1,2 @@
"""Api_version."""
V1_API_PATH_PREFIX = "/v1.0"
@@ -17,7 +17,7 @@ from spiffworkflow_backend.models.user_group_assignment import (
from spiffworkflow_backend.models.principal import PrincipalModel # noqa: F401


from spiffworkflow_backend.models.active_task import ActiveTaskModel # noqa: F401
from spiffworkflow_backend.models.human_task import HumanTaskModel # noqa: F401
from spiffworkflow_backend.models.spec_reference import (
SpecReferenceCache,
) # noqa: F401

@@ -27,6 +27,9 @@ class GroupModel(FlaskBpmnGroupModel):
identifier = db.Column(db.String(255))

user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
user_group_assignments_waiting = relationship( # type: ignore
"UserGroupAssignmentWaitingModel", cascade="delete"
)
users = relationship( # type: ignore
"UserModel",
viewonly=True,

@@ -1,4 +1,4 @@
"""Active_task."""
"""Human_task."""
from __future__ import annotations

from dataclasses import dataclass
@@ -8,7 +8,6 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.orm import RelationshipProperty

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -17,29 +16,30 @@ from spiffworkflow_backend.models.user import UserModel


if TYPE_CHECKING:
from spiffworkflow_backend.models.active_task_user import ( # noqa: F401
ActiveTaskUserModel,
from spiffworkflow_backend.models.human_task_user import ( # noqa: F401
HumanTaskUserModel,
)


@dataclass
class ActiveTaskModel(SpiffworkflowBaseDBModel):
"""ActiveTaskModel."""
class HumanTaskModel(SpiffworkflowBaseDBModel):
"""HumanTaskModel."""

__tablename__ = "active_task"
__tablename__ = "human_task"
__table_args__ = (
db.UniqueConstraint(
"task_id", "process_instance_id", name="active_task_unique"
),
db.UniqueConstraint("task_id", "process_instance_id", name="human_task_unique"),
)

actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id))
completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)

actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
# actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)

form_file_name: str | None = db.Column(db.String(50))
ui_form_file_name: str | None = db.Column(db.String(50))

@@ -52,17 +52,18 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
task_type: str = db.Column(db.String(50))
task_status: str = db.Column(db.String(50))
process_model_display_name: str = db.Column(db.String(255))
completed: bool = db.Column(db.Boolean, default=False, nullable=False, index=True)

active_task_users = relationship("ActiveTaskUserModel", cascade="delete")
human_task_users = relationship("HumanTaskUserModel", cascade="delete")
potential_owners = relationship( # type: ignore
"UserModel",
viewonly=True,
secondary="active_task_user",
overlaps="active_task_user,users",
secondary="human_task_user",
overlaps="human_task_user,users",
)

@classmethod
def to_task(cls, task: ActiveTaskModel) -> Task:
def to_task(cls, task: HumanTaskModel) -> Task:
"""To_task."""
new_task = Task(
task.task_id,
@@ -79,7 +80,7 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
if hasattr(task, "process_model_identifier"):
new_task.process_model_identifier = task.process_model_identifier

# active tasks only have status when getting the list on the home page
# human tasks only have status when getting the list on the home page
# and it comes from the process_instance. it should not be confused with task_status.
if hasattr(task, "status"):
new_task.process_instance_status = task.status

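Editor's note: with the rename above, a user's open tasks can be pulled through the human_task_user join table. A hypothetical query sketch follows (not part of this commit), assuming a Flask app context and the models exactly as defined in this diff.

from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel

def open_human_tasks_for_user(user_id: int) -> list[HumanTaskModel]:
    # Incomplete human tasks where the given user is a potential owner.
    return (
        HumanTaskModel.query.join(
            HumanTaskUserModel,
            HumanTaskUserModel.human_task_id == HumanTaskModel.id,
        )
        .filter(HumanTaskUserModel.user_id == user_id)
        .filter(HumanTaskModel.completed == False)  # noqa: E712
        .all()
    )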
@@ -1,4 +1,4 @@
"""Active_task_user."""
"""Human_task_user."""
from __future__ import annotations

from dataclasses import dataclass
@@ -7,26 +7,26 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.user import UserModel


@dataclass
class ActiveTaskUserModel(SpiffworkflowBaseDBModel):
"""ActiveTaskUserModel."""
class HumanTaskUserModel(SpiffworkflowBaseDBModel):
"""HumanTaskUserModel."""

__tablename__ = "active_task_user"
__tablename__ = "human_task_user"

__table_args__ = (
db.UniqueConstraint(
"active_task_id",
"human_task_id",
"user_id",
name="active_task_user_unique",
name="human_task_user_unique",
),
)

id = db.Column(db.Integer, primary_key=True)
active_task_id = db.Column(
ForeignKey(ActiveTaskModel.id), nullable=False, index=True # type: ignore
human_task_id = db.Column(
ForeignKey(HumanTaskModel.id), nullable=False, index=True # type: ignore
)
user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
@@ -86,5 +86,6 @@ def ensure_failure_cause_is_set_if_message_instance_failed(
if isinstance(instance, MessageInstanceModel):
if instance.status == "failed" and instance.failure_cause is None:
raise ValueError(
f"{instance.__class__.__name__}: failure_cause must be set if status is failed"
f"{instance.__class__.__name__}: failure_cause must be set if"
" status is failed"
)

@@ -32,14 +32,6 @@ class Permission(enum.Enum):
update = "update"
delete = "delete"

# maybe read to GET process_model/process-instances instead?
list = "list"

# maybe use create instead on
# POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/*
# POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/332/run
instantiate = "instantiate" # this is something you do to a process model


class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
"""PermissionAssignmentModel."""

@@ -26,34 +26,12 @@ class ProcessInstanceNotFoundError(Exception):
"""ProcessInstanceNotFoundError."""


class NavigationItemSchema(Schema):
"""NavigationItemSchema."""
class ProcessInstanceTaskDataCannotBeUpdatedError(Exception):
"""ProcessInstanceTaskDataCannotBeUpdatedError."""

class Meta:
"""Meta."""

fields = [
"spec_id",
"name",
"spec_type",
"task_id",
"description",
"backtracks",
"indent",
"lane",
"state",
"children",
]
unknown = INCLUDE

state = marshmallow.fields.String(required=False, allow_none=True)
description = marshmallow.fields.String(required=False, allow_none=True)
backtracks = marshmallow.fields.String(required=False, allow_none=True)
lane = marshmallow.fields.String(required=False, allow_none=True)
task_id = marshmallow.fields.String(required=False, allow_none=True)
children = marshmallow.fields.List(
marshmallow.fields.Nested(lambda: NavigationItemSchema())
)
class ProcessInstanceCannotBeDeletedError(Exception):
"""ProcessInstanceCannotBeDeletedError."""


class ProcessInstanceStatus(SpiffEnum):
@@ -82,7 +60,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
process_initiator = relationship("UserModel")

active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore
active_human_tasks = relationship(
"HumanTaskModel",
primaryjoin=(
"and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id,"
" HumanTaskModel.completed == False)"
),
) # type: ignore

human_tasks = relationship(
"HumanTaskModel",
cascade="delete",
overlaps="active_human_tasks",
) # type: ignore
message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore

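Editor's note: the active_human_tasks relationship above uses a string primaryjoin so the collection loads only incomplete tasks, while human_tasks keeps the full set (overlaps= silences SQLAlchemy's shared-foreign-key warning). An equivalent ad-hoc query, shown only as a sketch of what that primaryjoin filters:

from spiffworkflow_backend.models.human_task import HumanTaskModel

def active_human_tasks_for(process_instance_id: int) -> list[HumanTaskModel]:
    # Same predicate the primaryjoin expresses: this instance's tasks, not yet completed.
    return HumanTaskModel.query.filter_by(
        process_instance_id=process_instance_id, completed=False
    ).all()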
@@ -93,7 +83,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
created_at_in_seconds: int = db.Column(db.Integer)
status: str = db.Column(db.String(50))

bpmn_xml_file_contents: bytes | None = None
bpmn_xml_file_contents: str | None = None
bpmn_version_control_type: str = db.Column(db.String(50))
bpmn_version_control_identifier: str = db.Column(db.String(255))
spiff_step: int = db.Column(db.Integer)
@@ -101,9 +91,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
@property
def serialized(self) -> dict[str, Any]:
"""Return object data in serializeable format."""
local_bpmn_xml_file_contents = ""
if self.bpmn_xml_file_contents:
local_bpmn_xml_file_contents = self.bpmn_xml_file_contents.decode("utf-8")
return {
"id": self.id,
"process_model_identifier": self.process_model_identifier,
@@ -112,7 +99,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"start_in_seconds": self.start_in_seconds,
"end_in_seconds": self.end_in_seconds,
"process_initiator_id": self.process_initiator_id,
"bpmn_xml_file_contents": local_bpmn_xml_file_contents,
"bpmn_xml_file_contents": self.bpmn_xml_file_contents,
"bpmn_version_control_identifier": self.bpmn_version_control_identifier,
"bpmn_version_control_type": self.bpmn_version_control_type,
"spiff_step": self.spiff_step,
@@ -134,6 +121,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"""Validate_status."""
return self.validate_enum_field(key, value, ProcessInstanceStatus)

def can_submit_task(self) -> bool:
"""Can_submit_task."""
return not self.has_terminal_status() and self.status != "suspended"

def has_terminal_status(self) -> bool:
"""Has_terminal_status."""
return self.status in self.terminal_statuses()

@classmethod
def terminal_statuses(cls) -> list[str]:
"""Terminal_statuses."""
return ["complete", "error", "terminated"]


class ProcessInstanceModelSchema(Schema):
"""ProcessInstanceModelSchema."""

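Editor's note: the new helpers above gate task submission on instance status. A small free-standing sketch of the rule they encode, with the status list taken from terminal_statuses in the diff (the sample "waiting" status is only an illustrative non-terminal value):

TERMINAL_STATUSES = ["complete", "error", "terminated"]

def can_submit_task(status: str) -> bool:
    # Tasks may only be submitted while the instance is neither finished nor suspended.
    return status not in TERMINAL_STATUSES and status != "suspended"

assert can_submit_task("waiting") is True
assert can_submit_task("suspended") is False
assert can_submit_task("terminated") is False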
@@ -1,4 +1,4 @@
"""Spiff_step_details."""
"""Process_instance_metadata."""
from dataclasses import dataclass

from flask_bpmn.models.db import db

@@ -8,6 +8,10 @@ from marshmallow import INCLUDE
from sqlalchemy import UniqueConstraint


class SpecReferenceNotFoundError(Exception):
"""SpecReferenceNotFoundError."""


@dataclass()
class SpecReference:
"""File Reference Information.

@@ -8,7 +8,7 @@ from flask_bpmn.models.db import SpiffworkflowBaseDBModel

@dataclass
class SpiffLoggingModel(SpiffworkflowBaseDBModel):
"""LoggingModel."""
"""SpiffLoggingModel."""

__tablename__ = "spiff_logging"
id: int = db.Column(db.Integer, primary_key=True)

@@ -1,13 +1,11 @@
"""Spiff_step_details."""
from dataclasses import dataclass
from typing import Optional

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import deferred

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


@@ -20,10 +18,13 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
# human_task_id: int = db.Column(
#   ForeignKey(HumanTaskModel.id) # type: ignore
# )
spiff_step: int = db.Column(db.Integer, nullable=False)
task_json: str = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
completed_by_user_id: int = db.Column(db.Integer, nullable=True)
lane_assignment_id: Optional[int] = db.Column(
ForeignKey(GroupModel.id), nullable=True
)
# completed_by_user_id: int = db.Column(db.Integer, nullable=True)
# lane_assignment_id: Optional[int] = db.Column(
#     ForeignKey(GroupModel.id), nullable=True
# )

@@ -43,8 +43,8 @@ class Task:
FIELD_TYPE_EMAIL = "email" # email: Email address
FIELD_TYPE_URL = "url" # url: Website address

FIELD_PROP_AUTO_COMPLETE_MAX = (
"autocomplete_num" # Not used directly, passed in from the front end.
FIELD_PROP_AUTO_COMPLETE_MAX = ( # Not used directly, passed in from the front end.
"autocomplete_num"
)

# Required field
@@ -77,8 +77,8 @@ class Task:

# File specific field properties
FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code
FIELD_PROP_FILE_DATA = (
"file_data" # to associate a bit of data with a specific file upload file.
FIELD_PROP_FILE_DATA = ( # to associate a bit of data with a specific file upload file.
"file_data"
)

# Additional properties
@@ -108,7 +108,7 @@ class Task:
multi_instance_type: Union[MultiInstanceType, None] = None,
multi_instance_count: str = "",
multi_instance_index: str = "",
process_name: str = "",
process_identifier: str = "",
properties: Union[dict, None] = None,
process_instance_id: Union[int, None] = None,
process_instance_status: Union[str, None] = None,
@@ -118,6 +118,8 @@ class Task:
form_schema: Union[str, None] = None,
form_ui_schema: Union[str, None] = None,
parent: Optional[str] = None,
event_definition: Union[dict[str, Any], None] = None,
call_activity_process_identifier: Optional[str] = None,
):
"""__init__."""
self.id = id
@@ -129,6 +131,8 @@ class Task:
self.documentation = documentation
self.lane = lane
self.parent = parent
self.event_definition = event_definition
self.call_activity_process_identifier = call_activity_process_identifier

self.data = data
if self.data is None:
@@ -151,7 +155,7 @@ class Task:
self.multi_instance_index = (
multi_instance_index # And the index of the currently repeating task.
)
self.process_name = process_name
self.process_identifier = process_identifier

self.properties = properties # Arbitrary extension properties from BPMN editor.
if self.properties is None:
@@ -177,7 +181,7 @@ class Task:
"multi_instance_type": multi_instance_type,
"multi_instance_count": self.multi_instance_count,
"multi_instance_index": self.multi_instance_index,
"process_name": self.process_name,
"process_identifier": self.process_identifier,
"properties": self.properties,
"process_instance_id": self.process_instance_id,
"process_instance_status": self.process_instance_status,
@@ -187,6 +191,8 @@ class Task:
"form_schema": self.form_schema,
"form_ui_schema": self.form_ui_schema,
"parent": self.parent,
"event_definition": self.event_definition,
"call_activity_process_identifier": self.call_activity_process_identifier,
}

@classmethod
@@ -282,18 +288,19 @@ class TaskSchema(Schema):
"multi_instance_type",
"multi_instance_count",
"multi_instance_index",
"process_name",
"process_identifier",
"properties",
"process_instance_id",
"form_schema",
"form_ui_schema",
"event_definition",
]

multi_instance_type = EnumField(MultiInstanceType)
documentation = marshmallow.fields.String(required=False, allow_none=True)
# form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True)
title = marshmallow.fields.String(required=False, allow_none=True)
process_name = marshmallow.fields.String(required=False, allow_none=True)
process_identifier = marshmallow.fields.String(required=False, allow_none=True)
lane = marshmallow.fields.String(required=False, allow_none=True)

@marshmallow.post_load

@@ -1,22 +1,15 @@
"""User."""
from __future__ import annotations

from typing import Any

import jwt
import marshmallow
from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import Schema
from sqlalchemy.orm import relationship
from sqlalchemy.orm import validates

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.services.authentication_service import (
    AuthenticationProviderTypes,
)


class UserNotFoundError(Exception):
@@ -28,15 +21,18 @@ class UserModel(SpiffworkflowBaseDBModel):

    __tablename__ = "user"
    __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)

    id = db.Column(db.Integer, primary_key=True)
    # server and service id must be unique, not username.
    username = db.Column(db.String(255), nullable=False, unique=False)
    uid = db.Column(db.String(50), unique=True)
    service = db.Column(db.String(50), nullable=False, unique=False)
    username = db.Column(
        db.String(255), nullable=False, unique=True
    )  # should always be a unique value
    service = db.Column(
        db.String(255), nullable=False, unique=False
    )  # not 'openid' -- google, aws
    service_id = db.Column(db.String(255), nullable=False, unique=False)
    name = db.Column(db.String(255))
    display_name = db.Column(db.String(255))
    email = db.Column(db.String(255))
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

    user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")  # type: ignore
    groups = relationship(  # type: ignore
@@ -47,21 +43,6 @@ class UserModel(SpiffworkflowBaseDBModel):
    )
    principal = relationship("PrincipalModel", uselist=False)  # type: ignore

    @validates("service")
    def validate_service(self, key: str, value: Any) -> str:
        """Validate_service."""
        try:
            ap_type = getattr(AuthenticationProviderTypes, value, None)
        except Exception as e:
            raise ValueError(f"invalid service type: {value}") from e
        if ap_type is not None:
            ap_value: str = ap_type.value
            return ap_value
        raise ApiError(
            error_code="invalid_service",
            message=f"Could not validate service with value: {value}",
        )

    def encode_auth_token(self) -> str:
        """Generate the Auth Token.
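Since validate_service is wired up with SQLAlchemy's @validates, the check runs the moment the attribute is assigned, including through the constructor. A hedged sketch of that behavior ("open_id" as a valid provider value is an assumption about AuthenticationProviderTypes, which is not shown in this diff):

# validate_service fires during this assignment; an unrecognized provider
# would raise ApiError(error_code="invalid_service") right here.
user = UserModel(username="alice", service="open_id", service_id="alice")
assert user.service == "open_id"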
@@ -0,0 +1,34 @@
"""UserGroupAssignment."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.group import GroupModel


class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel):
    """When a user is assigned to a group, but that username does not exist.

    We cache it here to be applied in the event the user does log in to the system.
    """

    MATCH_ALL_USERS = "*"
    __tablename__ = "user_group_assignment_waiting"
    __table_args__ = (
        db.UniqueConstraint(
            "username", "group_id", name="user_group_assignment_staged_unique"
        ),
    )

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(255), nullable=False)
    group_id = db.Column(ForeignKey(GroupModel.id), nullable=False)

    group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users")  # type: ignore

    def is_match_all(self) -> bool:
        """Is_match_all."""
        if self.username == self.MATCH_ALL_USERS:
            return True
        return False
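A sketch of how these cached rows might be consumed when the user finally logs in. The apply_waiting_group_assignments helper and the UserGroupAssignmentModel constructor shape are assumptions for illustration; the actual consuming code is not part of this diff:

def apply_waiting_group_assignments(user: UserModel) -> None:
    """Attach group memberships that were staged before this user existed."""
    waiting = UserGroupAssignmentWaitingModel.query.filter(
        (UserGroupAssignmentWaitingModel.username == user.username)
        | (
            UserGroupAssignmentWaitingModel.username
            == UserGroupAssignmentWaitingModel.MATCH_ALL_USERS
        )
    ).all()
    for record in waiting:
        db.session.add(
            UserGroupAssignmentModel(user_id=user.id, group_id=record.group_id)
        )
        if not record.is_match_all():
            # one-shot rows are consumed; "*" rows keep applying to new users
            db.session.delete(record)
    db.session.commit()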
@@ -141,7 +141,7 @@ def process_model_save(process_model_id: str, file_name: str) -> Union[str, Resp
@admin_blueprint.route("/process-models/<process_model_id>/run", methods=["GET"])
def process_model_run(process_model_id: str) -> Union[str, Response]:
    """Process_model_run."""
    user = UserService.create_user("internal", "Mr. Test", username="Mr. Test")
    user = UserService.create_user("Mr. Test", "internal", "Mr. Test")
    process_instance = (
        ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model_id, user
@@ -0,0 +1,13 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json

import flask.wrappers
from flask.wrappers import Response

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


def status() -> flask.wrappers.Response:
    """Status."""
    ProcessInstanceModel.query.filter().first()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
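The lone query exists so the endpoint fails when the database is unreachable, which makes this a real liveness check rather than a constant response. One way to exercise it is Flask's test client; the /v1.0/status path and the app fixture are assumptions, since route registration is not part of this diff:

def test_status_returns_ok(app) -> None:  # "app" is an assumed Flask app fixture
    client = app.test_client()
    response = client.get("/v1.0/status")  # hypothetical route for status()
    assert response.status_code == 200
    assert response.get_json() == {"ok": True}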
@@ -0,0 +1,176 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Dict
from typing import Optional

import flask.wrappers
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.message_triggerable_process_model import (
    MessageTriggerableProcessModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.routes.process_api_blueprint import (
    _find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.services.message_service import MessageService


def message_instance_list(
    process_instance_id: Optional[int] = None,
    page: int = 1,
    per_page: int = 100,
) -> flask.wrappers.Response:
    """Message_instance_list."""
    # to make sure the process instance exists
    message_instances_query = MessageInstanceModel.query

    if process_instance_id:
        message_instances_query = message_instances_query.filter_by(
            process_instance_id=process_instance_id
        )

    message_instances = (
        message_instances_query.order_by(
            MessageInstanceModel.created_at_in_seconds.desc(),  # type: ignore
            MessageInstanceModel.id.desc(),  # type: ignore
        )
        .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id)
        .join(ProcessInstanceModel)
        .add_columns(
            MessageModel.identifier.label("message_identifier"),
            ProcessInstanceModel.process_model_identifier,
            ProcessInstanceModel.process_model_display_name,
        )
        .paginate(page=page, per_page=per_page, error_out=False)
    )

    for message_instance in message_instances:
        message_correlations: dict = {}
        for (
            mcmi
        ) in (
            message_instance.MessageInstanceModel.message_correlations_message_instances
        ):
            mc = MessageCorrelationModel.query.filter_by(
                id=mcmi.message_correlation_id
            ).all()
            for m in mc:
                if m.name not in message_correlations:
                    message_correlations[m.name] = {}
                message_correlations[m.name][
                    m.message_correlation_property.identifier
                ] = m.value
        message_instance.MessageInstanceModel.message_correlations = (
            message_correlations
        )

    response_json = {
        "results": message_instances.items,
        "pagination": {
            "count": len(message_instances.items),
            "total": message_instances.total,
            "pages": message_instances.pages,
        },
    }

    return make_response(jsonify(response_json), 200)


# body: {
#   payload: dict,
#   process_instance_id: Optional[int],
# }
def message_start(
    message_identifier: str,
    body: Dict[str, Any],
) -> flask.wrappers.Response:
    """Message_start."""
    message_model = MessageModel.query.filter_by(identifier=message_identifier).first()
    if message_model is None:
        raise (
            ApiError(
                error_code="unknown_message",
                message=f"Could not find message with identifier: {message_identifier}",
                status_code=404,
            )
        )

    if "payload" not in body:
        raise (
            ApiError(
                error_code="missing_payload",
                message="Body is missing payload.",
                status_code=400,
            )
        )

    process_instance = None
    if "process_instance_id" in body:
        # to make sure we have a valid process_instance_id
        process_instance = _find_process_instance_by_id_or_raise(
            body["process_instance_id"]
        )

        message_instance = MessageInstanceModel.query.filter_by(
            process_instance_id=process_instance.id,
            message_model_id=message_model.id,
            message_type="receive",
            status="ready",
        ).first()
        if message_instance is None:
            raise (
                ApiError(
                    error_code="cannot_find_waiting_message",
                    message=(
                        "Could not find waiting message for identifier"
                        f" {message_identifier} and process instance"
                        f" {process_instance.id}"
                    ),
                    status_code=400,
                )
            )
        MessageService.process_message_receive(
            message_instance, message_model.name, body["payload"]
        )

    else:
        message_triggerable_process_model = (
            MessageTriggerableProcessModel.query.filter_by(
                message_model_id=message_model.id
            ).first()
        )

        if message_triggerable_process_model is None:
            raise (
                ApiError(
                    error_code="cannot_start_message",
                    message=(
                        "Cannot start message with identifier:"
                        f" {message_identifier}"
                    ),
                    status_code=400,
                )
            )

        process_instance = MessageService.process_message_triggerable_process_model(
            message_triggerable_process_model,
            message_model.name,
            body["payload"],
            g.user,
        )

    return Response(
        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
        status=200,
        mimetype="application/json",
    )
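Per the comment above message_start, the request body carries a payload and, optionally, a process instance id. Illustrative bodies (values invented) for the two branches:

# targets a waiting "receive" message on an existing instance
body_receive = {
    "payload": {"order_id": 1234, "approved": True},
    "process_instance_id": 42,
}

# omits process_instance_id, so a message-triggerable process model starts instead
body_start = {"payload": {"order_id": 1234, "approved": True}}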
@@ -111,6 +111,7 @@ def token() -> dict:
            "iat": time.time(),
            "exp": time.time() + 86400,  # Expire after a day.
            "sub": user_name,
            "email": user_details["email"],
            "preferred_username": user_details.get("preferred_username", user_name),
        },
        client_secret,
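These claims follow standard JWT semantics, so the resulting token can be verified with PyJWT; the HS256 algorithm here is an assumption for illustration, and token / client_secret stand for the values used above:

import jwt  # PyJWT

decoded = jwt.decode(token, client_secret, algorithms=["HS256"])  # assumed HS256
assert decoded["preferred_username"]  # falls back to user_name when absent
assert decoded["exp"] > decoded["iat"]  # expires one day after issuance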
File diff suppressed because it is too large
@@ -0,0 +1,130 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Optional

import flask.wrappers
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService


def process_group_create(body: dict) -> flask.wrappers.Response:
    """Add_process_group."""
    process_group = ProcessGroup(**body)
    ProcessModelService.add_process_group(process_group)
    _commit_and_push_to_git(
        f"User: {g.user.username} added process group {process_group.id}"
    )
    return make_response(jsonify(process_group), 201)


def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
    """Process_group_delete."""
    process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    ProcessModelService().process_group_delete(process_group_id)
    _commit_and_push_to_git(
        f"User: {g.user.username} deleted process group {process_group_id}"
    )
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_group_update(
    modified_process_group_id: str, body: dict
) -> flask.wrappers.Response:
    """Process Group Update."""
    body_include_list = ["display_name", "description"]
    body_filtered = {
        include_item: body[include_item]
        for include_item in body_include_list
        if include_item in body
    }

    process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    process_group = ProcessGroup(id=process_group_id, **body_filtered)
    ProcessModelService.update_process_group(process_group)
    _commit_and_push_to_git(
        f"User: {g.user.username} updated process group {process_group_id}"
    )
    return make_response(jsonify(process_group), 200)


def process_group_list(
    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Process_group_list."""
    if process_group_identifier is not None:
        process_groups = ProcessModelService.get_process_groups(
            process_group_identifier
        )
    else:
        process_groups = ProcessModelService.get_process_groups()
    batch = ProcessModelService().get_batch(
        items=process_groups, page=page, per_page=per_page
    )
    pages = len(process_groups) // per_page
    remainder = len(process_groups) % per_page
    if remainder > 0:
        pages += 1

    response_json = {
        "results": ProcessGroupSchema(many=True).dump(batch),
        "pagination": {
            "count": len(batch),
            "total": len(process_groups),
            "pages": pages,
        },
    }
    return Response(json.dumps(response_json), status=200, mimetype="application/json")
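The floor-division-plus-remainder arithmetic above is just ceiling division; a compact equivalent, shown only to clarify the intent:

import math

assert math.ceil(130 / 100) == 2  # 130 groups at 100 per page -> 2 pages
assert (130 // 100) + (1 if 130 % 100 else 0) == 2  # same result as the code above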
def process_group_show(
    modified_process_group_id: str,
) -> Any:
    """Process_group_show."""
    process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    try:
        process_group = ProcessModelService.get_process_group(process_group_id)
    except ProcessEntityNotFoundError as exception:
        raise (
            ApiError(
                error_code="process_group_cannot_be_found",
                message=f"Process group cannot be found: {process_group_id}",
                status_code=400,
            )
        ) from exception

    process_group.parent_groups = ProcessModelService.get_parent_group_array(
        process_group.id
    )
    return make_response(jsonify(process_group), 200)


def process_group_move(
    modified_process_group_identifier: str, new_location: str
) -> flask.wrappers.Response:
    """Process_group_move."""
    original_process_group_id = _un_modify_modified_process_model_id(
        modified_process_group_identifier
    )
    new_process_group = ProcessModelService().process_group_move(
        original_process_group_id, new_location
    )
    _commit_and_push_to_git(
        f"User: {g.user.username} moved process group {original_process_group_id} to"
        f" {new_process_group.id}"
    )
    return make_response(jsonify(new_process_group), 200)
@@ -0,0 +1,693 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Dict
from typing import Optional

import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask import request
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import TaskState  # type: ignore
from sqlalchemy import and_
from sqlalchemy import or_

from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
from spiffworkflow_backend.models.process_instance import (
    ProcessInstanceCannotBeDeletedError,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance_metadata import (
    ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
    _find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportFilter,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportService,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService


def process_instance_create(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Create_process_instance."""
    process_model_identifier = _un_modify_modified_process_model_id(
        modified_process_model_identifier
    )
    process_instance = (
        ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model_identifier, g.user
        )
    )
    return Response(
        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
        status=201,
        mimetype="application/json",
    )


def process_instance_run(
    modified_process_model_identifier: str,
    process_instance_id: int,
    do_engine_steps: bool = True,
) -> flask.wrappers.Response:
    """Process_instance_run."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    if process_instance.status != "not_started":
        raise ApiError(
            error_code="process_instance_not_runnable",
            message=(
                f"Process Instance ({process_instance.id}) is currently running or has"
                " already run."
            ),
            status_code=400,
        )

    processor = ProcessInstanceProcessor(process_instance)

    if do_engine_steps:
        try:
            processor.do_engine_steps(save=True)
        except ApiError as e:
            ErrorHandlingService().handle_error(processor, e)
            raise e
        except Exception as e:
            ErrorHandlingService().handle_error(processor, e)
            task = processor.bpmn_process_instance.last_task
            raise ApiError.from_task(
                error_code="unknown_exception",
                message=f"An unknown error occurred. Original error: {e}",
                status_code=400,
                task=task,
            ) from e

        if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
            MessageService.process_message_instances()

    process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
        processor
    )
    process_instance_data = processor.get_data()
    process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
    process_instance_metadata["data"] = process_instance_data
    return Response(
        json.dumps(process_instance_metadata), status=200, mimetype="application/json"
    )


def process_instance_terminate(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_terminate."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.terminate()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_suspend(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_suspend."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.suspend()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_resume(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_resume."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.resume()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_log_list(
    modified_process_model_identifier: str,
    process_instance_id: int,
    page: int = 1,
    per_page: int = 100,
    detailed: bool = False,
) -> flask.wrappers.Response:
    """Process_instance_log_list."""
    # to make sure the process instance exists
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

    log_query = SpiffLoggingModel.query.filter(
        SpiffLoggingModel.process_instance_id == process_instance.id
    )
    if not detailed:
        log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"]))  # type: ignore

    logs = (
        log_query.order_by(SpiffLoggingModel.timestamp.desc())  # type: ignore
        .join(
            UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
        )  # isouter since if we don't have a user, we still want the log
        .add_columns(
            UserModel.username,
        )
        .paginate(page=page, per_page=per_page, error_out=False)
    )

    response_json = {
        "results": logs.items,
        "pagination": {
            "count": len(logs.items),
            "total": logs.total,
            "pages": logs.pages,
        },
    }

    return make_response(jsonify(response_json), 200)


def process_instance_list_for_me(
    process_model_identifier: Optional[str] = None,
    page: int = 1,
    per_page: int = 100,
    start_from: Optional[int] = None,
    start_to: Optional[int] = None,
    end_from: Optional[int] = None,
    end_to: Optional[int] = None,
    process_status: Optional[str] = None,
    user_filter: Optional[bool] = False,
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_list_for_me."""
    return process_instance_list(
        process_model_identifier=process_model_identifier,
        page=page,
        per_page=per_page,
        start_from=start_from,
        start_to=start_to,
        end_from=end_from,
        end_to=end_to,
        process_status=process_status,
        user_filter=user_filter,
        report_identifier=report_identifier,
        report_id=report_id,
        user_group_identifier=user_group_identifier,
        with_relation_to_me=True,
    )


def process_instance_list(
    process_model_identifier: Optional[str] = None,
    page: int = 1,
    per_page: int = 100,
    start_from: Optional[int] = None,
    start_to: Optional[int] = None,
    end_from: Optional[int] = None,
    end_to: Optional[int] = None,
    process_status: Optional[str] = None,
    with_relation_to_me: Optional[bool] = None,
    user_filter: Optional[bool] = False,
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_list."""
    process_instance_report = ProcessInstanceReportService.report_with_identifier(
        g.user, report_id, report_identifier
    )

    if user_filter:
        report_filter = ProcessInstanceReportFilter(
            process_model_identifier=process_model_identifier,
            user_group_identifier=user_group_identifier,
            start_from=start_from,
            start_to=start_to,
            end_from=end_from,
            end_to=end_to,
            with_relation_to_me=with_relation_to_me,
            process_status=process_status.split(",") if process_status else None,
        )
    else:
        report_filter = (
            ProcessInstanceReportService.filter_from_metadata_with_overrides(
                process_instance_report=process_instance_report,
                process_model_identifier=process_model_identifier,
                user_group_identifier=user_group_identifier,
                start_from=start_from,
                start_to=start_to,
                end_from=end_from,
                end_to=end_to,
                process_status=process_status,
                with_relation_to_me=with_relation_to_me,
            )
        )

    response_json = ProcessInstanceReportService.run_process_instance_report(
        report_filter=report_filter,
        process_instance_report=process_instance_report,
        page=page,
        per_page=per_page,
        user=g.user,
    )

    return make_response(jsonify(response_json), 200)
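Callers usually reach process_instance_list through the REST API, with the keyword arguments above arriving as query parameters. An illustrative client-side call (the host, route, and token are assumptions based on the parameter names; they are not defined in this diff):

import requests

response = requests.get(
    "https://backend.example.com/v1.0/process-instances",  # assumed route
    params={"process_status": "complete,error", "page": 1, "per_page": 100},
    headers={"Authorization": "Bearer <token>"},
)
print(response.json()["pagination"])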
def process_instance_report_column_list() -> flask.wrappers.Response:
    """Process_instance_report_column_list."""
    table_columns = ProcessInstanceReportService.builtin_column_options()
    columns_for_metadata = (
        db.session.query(ProcessInstanceMetadataModel.key)
        .order_by(ProcessInstanceMetadataModel.key)
        .distinct()  # type: ignore
        .all()
    )
    columns_for_metadata_strings = [
        {"Header": i[0], "accessor": i[0], "filterable": True}
        for i in columns_for_metadata
    ]
    return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)


def process_instance_show_for_me(
    modified_process_model_identifier: str,
    process_instance_id: int,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_show_for_me."""
    process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
    return _get_process_instance(
        process_instance=process_instance,
        modified_process_model_identifier=modified_process_model_identifier,
        process_identifier=process_identifier,
    )


def process_instance_show(
    modified_process_model_identifier: str,
    process_instance_id: int,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_show."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return _get_process_instance(
        process_instance=process_instance,
        modified_process_model_identifier=modified_process_model_identifier,
        process_identifier=process_identifier,
    )


def process_instance_delete(
    process_instance_id: int, modified_process_model_identifier: str
) -> flask.wrappers.Response:
    """Process_instance_delete."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

    if not process_instance.has_terminal_status():
        raise ProcessInstanceCannotBeDeletedError(
            f"Process instance ({process_instance.id}) cannot be deleted since it does"
            f" not have a terminal status. Current status is {process_instance.status}."
        )

    # (Pdb) db.session.delete
    # <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
    db.session.query(SpiffLoggingModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.query(SpiffStepDetailsModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.delete(process_instance)
    db.session.commit()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_report_list(
    page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Process_instance_report_list."""
    process_instance_reports = ProcessInstanceReportModel.query.filter_by(
        created_by_id=g.user.id,
    ).all()

    return make_response(jsonify(process_instance_reports), 200)


def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
    """Process_instance_report_create."""
    process_instance_report = ProcessInstanceReportModel.create_report(
        identifier=body["identifier"],
        user=g.user,
        report_metadata=body["report_metadata"],
    )

    return make_response(jsonify(process_instance_report), 201)


def process_instance_report_update(
    report_id: int,
    body: Dict[str, Any],
) -> flask.wrappers.Response:
    """Process_instance_report_update."""
    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    process_instance_report.report_metadata = body["report_metadata"]
    db.session.commit()

    return make_response(jsonify(process_instance_report), 201)


def process_instance_report_delete(
    report_id: int,
) -> flask.wrappers.Response:
    """Process_instance_report_delete."""
    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    db.session.delete(process_instance_report)
    db.session.commit()

    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_report_show(
    report_id: int,
    page: int = 1,
    per_page: int = 100,
) -> flask.wrappers.Response:
    """Process_instance_report_show."""
    process_instances = ProcessInstanceModel.query.order_by(
        ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()  # type: ignore
    ).paginate(page=page, per_page=per_page, error_out=False)

    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    substitution_variables = request.args.to_dict()
    result_dict = process_instance_report.generate_report(
        process_instances.items, substitution_variables
    )

    # update this if we go back to a database query instead of filtering in memory
    result_dict["pagination"] = {
        "count": len(result_dict["results"]),
        "total": len(result_dict["results"]),
        "pages": 1,
    }

    return Response(json.dumps(result_dict), status=200, mimetype="application/json")


def process_instance_task_list_without_task_data_for_me(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_task_list_without_task_data_for_me."""
    process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=False,
    )


def process_instance_task_list_without_task_data(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_task_list_without_task_data."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=False,
    )


def process_instance_task_list_with_task_data(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_task_list_with_task_data."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=True,
    )


def process_instance_task_list(
    _modified_process_model_identifier: str,
    process_instance: ProcessInstanceModel,
    all_tasks: bool = False,
    spiff_step: int = 0,
    get_task_data: bool = False,
) -> flask.wrappers.Response:
    """Process_instance_task_list."""
    if spiff_step > 0:
        step_detail = (
            db.session.query(SpiffStepDetailsModel)
            .filter(
                SpiffStepDetailsModel.process_instance_id == process_instance.id,
                SpiffStepDetailsModel.spiff_step == spiff_step,
            )
            .first()
        )
        if step_detail is not None and process_instance.bpmn_json is not None:
            bpmn_json = json.loads(process_instance.bpmn_json)
            bpmn_json["tasks"] = step_detail.task_json["tasks"]
            bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
            process_instance.bpmn_json = json.dumps(bpmn_json)

    processor = ProcessInstanceProcessor(process_instance)

    spiff_tasks = None
    if all_tasks:
        spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
    else:
        spiff_tasks = processor.get_all_user_tasks()

    tasks = []
    for spiff_task in spiff_tasks:
        task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
        if get_task_data:
            task.data = spiff_task.data
        tasks.append(task)

    return make_response(jsonify(tasks), 200)
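When spiff_step is positive, process_instance_task_list rewinds the serialized workflow by grafting the step detail's "tasks" and "subprocesses" snapshots into the instance's bpmn_json before hydrating the processor. A stripped-down illustration of that graft with plain dicts (the state numbers are placeholders, not real TaskState values):

import json

live = {"spec": {}, "tasks": {"t1": {"state": 64}}, "subprocesses": {}}
snapshot = {"tasks": {"t1": {"state": 32}}, "subprocesses": {}}  # captured earlier

live["tasks"] = snapshot["tasks"]  # task state rolls back to the snapshot
live["subprocesses"] = snapshot["subprocesses"]
rewound = json.dumps(live)  # what gets handed to ProcessInstanceProcessor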
def process_instance_reset(
    process_instance_id: int,
    modified_process_model_identifier: str,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_reset."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    step_detail = (
        db.session.query(SpiffStepDetailsModel)
        .filter(
            SpiffStepDetailsModel.process_instance_id == process_instance.id,
            SpiffStepDetailsModel.spiff_step == spiff_step,
        )
        .first()
    )
    if step_detail is not None and process_instance.bpmn_json is not None:
        bpmn_json = json.loads(process_instance.bpmn_json)
        bpmn_json["tasks"] = step_detail.task_json["tasks"]
        bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
        process_instance.bpmn_json = json.dumps(bpmn_json)

    db.session.add(process_instance)
    try:
        db.session.commit()
    except Exception as e:
        db.session.rollback()
        raise ApiError(
            error_code="reset_process_instance_error",
            message=f"Could not update the Instance. Original error is {e}",
        ) from e

    return Response(
        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
        status=200,
        mimetype="application/json",
    )


def _get_process_instance(
    modified_process_model_identifier: str,
    process_instance: ProcessInstanceModel,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """_get_process_instance."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    try:
        current_version_control_revision = GitService.get_current_revision()
    except GitCommandError:
        current_version_control_revision = ""

    process_model_with_diagram = None
    name_of_file_with_diagram = None
    if process_identifier:
        spec_reference = SpecReferenceCache.query.filter_by(
            identifier=process_identifier, type="process"
        ).first()
        if spec_reference is None:
            raise SpecReferenceNotFoundError(
                "Could not find given process identifier in the cache:"
                f" {process_identifier}"
            )

        process_model_with_diagram = ProcessModelService.get_process_model(
            spec_reference.process_model_id
        )
        name_of_file_with_diagram = spec_reference.file_name
    else:
        process_model_with_diagram = _get_process_model(process_model_identifier)
        if process_model_with_diagram.primary_file_name:
            name_of_file_with_diagram = process_model_with_diagram.primary_file_name

    if process_model_with_diagram and name_of_file_with_diagram:
        if (
            process_instance.bpmn_version_control_identifier
            == current_version_control_revision
        ):
            bpmn_xml_file_contents = SpecFileService.get_data(
                process_model_with_diagram, name_of_file_with_diagram
            ).decode("utf-8")
        else:
            bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision(
                process_model_with_diagram,
                process_instance.bpmn_version_control_identifier,
                file_name=name_of_file_with_diagram,
            )
        process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents

    return make_response(jsonify(process_instance), 200)


def _find_process_instance_for_me_or_raise(
    process_instance_id: int,
) -> ProcessInstanceModel:
    """_find_process_instance_for_me_or_raise."""
    process_instance: ProcessInstanceModel = (
        ProcessInstanceModel.query.filter_by(id=process_instance_id)
        .outerjoin(HumanTaskModel)
        .outerjoin(
            HumanTaskUserModel,
            and_(
                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
                HumanTaskUserModel.user_id == g.user.id,
            ),
        )
        .filter(
            or_(
                HumanTaskUserModel.id.is_not(None),
                ProcessInstanceModel.process_initiator_id == g.user.id,
            )
        )
        .first()
    )

    if process_instance is None:
        raise (
            ApiError(
                error_code="process_instance_cannot_be_found",
                message=(
                    f"Process instance with id {process_instance_id} that is"
                    " associated with you cannot be found."
                ),
                status_code=400,
            )
        )

    return process_instance
@@ -0,0 +1,481 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import os
import re
from typing import Any
from typing import Dict
from typing import Optional
from typing import Union

import connexion  # type: ignore
import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.models.file import FileSchema
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.git_service import MissingGitConfigsError
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService


def process_model_create(
    modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
    """Process_model_create."""
    body_include_list = [
        "id",
        "display_name",
        "primary_file_name",
        "primary_process_id",
        "description",
        "metadata_extraction_paths",
    ]
    body_filtered = {
        include_item: body[include_item]
        for include_item in body_include_list
        if include_item in body
    }

    _get_process_group_from_modified_identifier(modified_process_group_id)

    process_model_info = ProcessModelInfo(**body_filtered)  # type: ignore
    if process_model_info is None:
        raise ApiError(
            error_code="process_model_could_not_be_created",
            message=f"Process Model could not be created from given body: {body}",
            status_code=400,
        )

    ProcessModelService.add_process_model(process_model_info)
    _commit_and_push_to_git(
        f"User: {g.user.username} created process model {process_model_info.id}"
    )
    return Response(
        json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
        status=201,
        mimetype="application/json",
    )


def process_model_delete(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_model_delete."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    ProcessModelService().process_model_delete(process_model_identifier)
    _commit_and_push_to_git(
        f"User: {g.user.username} deleted process model {process_model_identifier}"
    )
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_model_update(
    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> Any:
    """Process_model_update."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    body_include_list = [
        "display_name",
        "primary_file_name",
        "primary_process_id",
        "description",
        "metadata_extraction_paths",
    ]
    body_filtered = {
        include_item: body[include_item]
        for include_item in body_include_list
        if include_item in body
    }

    process_model = _get_process_model(process_model_identifier)
    ProcessModelService.update_process_model(process_model, body_filtered)
    _commit_and_push_to_git(
        f"User: {g.user.username} updated process model {process_model_identifier}"
    )
    return ProcessModelInfoSchema().dump(process_model)


def process_model_show(modified_process_model_identifier: str) -> Any:
    """Process_model_show."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)
    files = sorted(
        SpecFileService.get_files(process_model),
        key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
    )
    process_model.files = files
    for file in process_model.files:
        file.references = SpecFileService.get_references_for_file(file, process_model)

    process_model.parent_groups = ProcessModelService.get_parent_group_array(
        process_model.id
    )
    return make_response(jsonify(process_model), 200)


def process_model_move(
    modified_process_model_identifier: str, new_location: str
) -> flask.wrappers.Response:
    """Process_model_move."""
    original_process_model_id = _un_modify_modified_process_model_id(
        modified_process_model_identifier
    )
    new_process_model = ProcessModelService().process_model_move(
        original_process_model_id, new_location
    )
    _commit_and_push_to_git(
        f"User: {g.user.username} moved process model {original_process_model_id} to"
        f" {new_process_model.id}"
    )
    return make_response(jsonify(new_process_model), 200)


def process_model_publish(
    modified_process_model_identifier: str, branch_to_update: Optional[str] = None
) -> flask.wrappers.Response:
    """Process_model_publish."""
    if branch_to_update is None:
        branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
        if branch_to_update is None:
            raise MissingGitConfigsError(
                "Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
                "This is required for publishing process models"
            )
    process_model_identifier = _un_modify_modified_process_model_id(
        modified_process_model_identifier
    )
    pr_url = GitService().publish(process_model_identifier, branch_to_update)
    data = {"ok": True, "pr_url": pr_url}
    return Response(json.dumps(data), status=200, mimetype="application/json")


def process_model_list(
    process_group_identifier: Optional[str] = None,
    recursive: Optional[bool] = False,
    filter_runnable_by_user: Optional[bool] = False,
    page: int = 1,
    per_page: int = 100,
) -> flask.wrappers.Response:
    """Process model list!"""
    process_models = ProcessModelService.get_process_models(
        process_group_id=process_group_identifier,
        recursive=recursive,
        filter_runnable_by_user=filter_runnable_by_user,
    )
    batch = ProcessModelService().get_batch(
        process_models, page=page, per_page=per_page
    )
    pages = len(process_models) // per_page
    remainder = len(process_models) % per_page
    if remainder > 0:
        pages += 1
    response_json = {
        "results": ProcessModelInfoSchema(many=True).dump(batch),
        "pagination": {
            "count": len(batch),
            "total": len(process_models),
            "pages": pages,
        },
    }
    return Response(json.dumps(response_json), status=200, mimetype="application/json")


def process_model_file_update(
    modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
    """Process_model_file_update."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)

    request_file = _get_file_from_request()
    request_file_contents = request_file.stream.read()
    if not request_file_contents:
        raise ApiError(
            error_code="file_contents_empty",
            message="Given request file does not have any content",
            status_code=400,
        )

    SpecFileService.update_file(process_model, file_name, request_file_contents)
    _commit_and_push_to_git(
        f"User: {g.user.username} clicked save for"
        f" {process_model_identifier}/{file_name}"
    )

    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_model_file_delete(
    modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
    """Process_model_file_delete."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)
    try:
        SpecFileService.delete_file(process_model, file_name)
    except FileNotFoundError as exception:
        raise (
            ApiError(
                error_code="process_model_file_cannot_be_found",
                message=f"Process model file cannot be found: {file_name}",
                status_code=400,
            )
        ) from exception

    _commit_and_push_to_git(
        f"User: {g.user.username} deleted process model file"
        f" {process_model_identifier}/{file_name}"
    )
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_model_file_create(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_model_file_create."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)
    request_file = _get_file_from_request()
    if not request_file.filename:
        raise ApiError(
            error_code="could_not_get_filename",
            message="Could not get filename from request",
            status_code=400,
        )

    file = SpecFileService.add_file(
        process_model, request_file.filename, request_file.stream.read()
    )
    file_contents = SpecFileService.get_data(process_model, file.name)
    file.file_contents = file_contents
    file.process_model_id = process_model.id
    _commit_and_push_to_git(
        f"User: {g.user.username} added process model file"
        f" {process_model_identifier}/{file.name}"
    )
    return Response(
        json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
    )


def process_model_file_show(
    modified_process_model_identifier: str, file_name: str
) -> Any:
    """Process_model_file_show."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)
    files = SpecFileService.get_files(process_model, file_name)
    if len(files) == 0:
        raise ApiError(
            error_code="unknown file",
            message=(
                f"No information exists for file {file_name};"
                f" it does not exist in workflow {process_model_identifier}."
            ),
            status_code=404,
        )

    file = files[0]
    file_contents = SpecFileService.get_data(process_model, file.name)
    file.file_contents = file_contents
    file.process_model_id = process_model.id
    return FileSchema().dump(file)


# {
#     "natural_language_text": "Create a bug tracker process model \
#     with a bug-details form that collects summary, description, and priority"
# }
def process_model_create_with_natural_language(
    modified_process_group_id: str, body: Dict[str, str]
) -> flask.wrappers.Response:
    """Process_model_create_with_natural_language."""
    pattern = re.compile(
        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that"
        r" collects (?P<columns>.*)"
    )
    match = pattern.match(body["natural_language_text"])
    if match is None:
        raise ApiError(
            error_code="natural_language_text_not_yet_supported",
            message=(
                "Natural language text is not yet supported. Please use the form:"
                f" {pattern.pattern}"
            ),
            status_code=400,
        )
    process_model_display_name = match.group("pm_name")
    process_model_identifier = re.sub(r"[ _]", "-", process_model_display_name)
    process_model_identifier = re.sub(r"-{2,}", "-", process_model_identifier).lower()

    form_name = match.group("form_name")
    form_identifier = re.sub(r"[ _]", "-", form_name)
    form_identifier = re.sub(r"-{2,}", "-", form_identifier).lower()

    column_names = match.group("columns")
    columns = re.sub(r"(, (and )?)", ",", column_names).split(",")

    process_group = _get_process_group_from_modified_identifier(
        modified_process_group_id
    )
    qualified_process_model_identifier = (
        f"{process_group.id}/{process_model_identifier}"
    )

    metadata_extraction_paths = []
    for column in columns:
        metadata_extraction_paths.append({"key": column, "path": column})

    process_model_attributes = {
        "id": qualified_process_model_identifier,
        "display_name": process_model_display_name,
        "description": None,
        "metadata_extraction_paths": metadata_extraction_paths,
    }

    process_model_info = ProcessModelInfo(**process_model_attributes)  # type: ignore
    if process_model_info is None:
        raise ApiError(
            error_code="process_model_could_not_be_created",
            message=f"Process Model could not be created from given body: {body}",
            status_code=400,
        )

    bpmn_template_file = os.path.join(
        current_app.root_path, "templates", "basic_with_user_task_template.bpmn"
    )
    if not os.path.exists(bpmn_template_file):
        raise ApiError(
            error_code="bpmn_template_file_does_not_exist",
            message="Could not find the bpmn template file to create process model.",
            status_code=500,
        )

    ProcessModelService.add_process_model(process_model_info)
    bpmn_process_identifier = f"{process_model_identifier}_process"
    bpmn_template_contents = ""
    with open(bpmn_template_file, encoding="utf-8") as f:
        bpmn_template_contents = f.read()

    bpmn_template_contents = bpmn_template_contents.replace(
        "natural_language_process_id_template", bpmn_process_identifier
    )
    bpmn_template_contents = bpmn_template_contents.replace(
        "form-identifier-id-template", form_identifier
    )

    form_uischema_json: dict = {"ui:order": columns}

    form_properties: dict = {}
    for column in columns:
        form_properties[column] = {
            "type": "string",
            "title": column,
        }
    form_schema_json = {
        "title": form_identifier,
        "description": "",
        "properties": form_properties,
        "required": [],
    }

    SpecFileService.add_file(
        process_model_info,
        f"{process_model_identifier}.bpmn",
        str.encode(bpmn_template_contents),
    )
    SpecFileService.add_file(
        process_model_info,
        f"{form_identifier}-schema.json",
        str.encode(json.dumps(form_schema_json)),
    )
    SpecFileService.add_file(
        process_model_info,
        f"{form_identifier}-uischema.json",
        str.encode(json.dumps(form_uischema_json)),
    )

    _commit_and_push_to_git(
        f"User: {g.user.username} created process model via natural language:"
        f" {process_model_info.id}"
    )

    default_report_metadata = ProcessInstanceReportService.system_metadata_map(
        "default"
    )
    for column in columns:
        default_report_metadata["columns"].append(
            {"Header": column, "accessor": column, "filterable": True}
        )
    ProcessInstanceReportModel.create_report(
        identifier=process_model_identifier,
        user=g.user,
        report_metadata=default_report_metadata,
    )

    return Response(
        json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
        status=201,
        mimetype="application/json",
    )
|
||||
|
||||
def _get_file_from_request() -> Any:
|
||||
"""Get_file_from_request."""
|
||||
request_file = connexion.request.files.get("file")
|
||||
if not request_file:
|
||||
raise ApiError(
|
||||
error_code="no_file_given",
|
||||
message="Given request does not contain a file",
|
||||
status_code=400,
|
||||
)
|
||||
return request_file
|
||||
|
||||
|
||||
def _get_process_group_from_modified_identifier(
|
||||
modified_process_group_id: str,
|
||||
) -> ProcessGroup:
|
||||
"""_get_process_group_from_modified_identifier."""
|
||||
if modified_process_group_id is None:
|
||||
raise ApiError(
|
||||
error_code="process_group_id_not_specified",
|
||||
message=(
|
||||
"Process Model could not be created when process_group_id path param is"
|
||||
" unspecified"
|
||||
),
|
||||
status_code=400,
|
||||
)
|
||||
|
||||
unmodified_process_group_id = _un_modify_modified_process_model_id(
|
||||
modified_process_group_id
|
||||
)
|
||||
process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
|
||||
if process_group is None:
|
||||
raise ApiError(
|
||||
error_code="process_model_could_not_be_created",
|
||||
message=(
|
||||
"Process Model could not be created from given body because Process"
|
||||
f" Group could not be found: {unmodified_process_group_id}"
|
||||
),
|
||||
status_code=400,
|
||||
)
|
||||
return process_group
|
|

@@ -0,0 +1,134 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import random
import string
from typing import Dict
from typing import Union

import flask.wrappers
from flask import current_app
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from lxml import etree  # type: ignore
from lxml.builder import ElementMaker  # type: ignore

from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
    _get_required_parameter_or_raise,
)
from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
from spiffworkflow_backend.services.spec_file_service import SpecFileService


def script_unit_test_create(
    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
    """Script_unit_test_create."""
    bpmn_task_identifier = _get_required_parameter_or_raise(
        "bpmn_task_identifier", body
    )
    input_json = _get_required_parameter_or_raise("input_json", body)
    expected_output_json = _get_required_parameter_or_raise(
        "expected_output_json", body
    )

    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)
    file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
    if file is None:
        raise ApiError(
            error_code="cannot_find_file",
            message=(
                "Could not find the primary bpmn file for process_model:"
                f" {process_model.id}"
            ),
            status_code=404,
        )

    # TODO: move this to an xml service or something
    file_contents = SpecFileService.get_data(process_model, file.name)
    bpmn_etree_element = etree.fromstring(file_contents)
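
    # Bind an ElementMaker to the spiffworkflow extension namespace so the
    # unit-test elements created below serialize with the correct prefix.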
    nsmap = bpmn_etree_element.nsmap
    spiff_element_maker = ElementMaker(
        namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
    )

    script_task_elements = bpmn_etree_element.xpath(
        f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",
        namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
    )
    if len(script_task_elements) == 0:
        raise ApiError(
            error_code="missing_script_task",
            message=f"Cannot find a script task with id: {bpmn_task_identifier}",
            status_code=404,
        )
    script_task_element = script_task_elements[0]

    extension_elements = None
    extension_elements_array = script_task_element.xpath(
        ".//bpmn:extensionElements",
        namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
    )
    if len(extension_elements_array) == 0:
        bpmn_element_maker = ElementMaker(
            namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
        )
        extension_elements = bpmn_element_maker("extensionElements")
        script_task_element.append(extension_elements)
    else:
        extension_elements = extension_elements_array[0]

    unit_test_elements = None
    unit_test_elements_array = extension_elements.xpath(
        "//spiffworkflow:unitTests",
        namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"},
    )
    if len(unit_test_elements_array) == 0:
        unit_test_elements = spiff_element_maker("unitTests")
        extension_elements.append(unit_test_elements)
    else:
        unit_test_elements = unit_test_elements_array[0]
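
    # Random 7-character suffix keeps generated unit test ids unique within the file.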
    fuzz = "".join(
        random.choice(string.ascii_uppercase + string.digits)  # noqa: S311
        for _ in range(7)
    )
    unit_test_id = f"unit_test_{fuzz}"

    input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
    expected_output_json_element = spiff_element_maker(
        "expectedOutputJson", json.dumps(expected_output_json)
    )
    unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
    unit_test_element.append(input_json_element)
    unit_test_element.append(expected_output_json_element)
    unit_test_elements.append(unit_test_element)
    SpecFileService.update_file(
        process_model, file.name, etree.tostring(bpmn_etree_element)
    )

    return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")


def script_unit_test_run(
    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
    """Script_unit_test_run."""
    # FIXME: We should probably clear this somewhere else but this works
    current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
    current_app.config["THREAD_LOCAL_DATA"].spiff_step = None

    python_script = _get_required_parameter_or_raise("python_script", body)
    input_json = _get_required_parameter_or_raise("input_json", body)
    expected_output_json = _get_required_parameter_or_raise(
        "expected_output_json", body
    )

    result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
        python_script, input_json, expected_output_json
    )
    return make_response(jsonify(result), 200)

@@ -0,0 +1,67 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Dict
from typing import Optional

from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response

from spiffworkflow_backend.models.secret_model import SecretModel
from spiffworkflow_backend.models.secret_model import SecretModelSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.user_service import UserService


def secret_show(key: str) -> Optional[str]:
    """Secret_show."""
    return SecretService.get_secret(key)


def secret_list(
    page: int = 1,
    per_page: int = 100,
) -> Response:
    """Secret_list."""
    secrets = (
        SecretModel.query.order_by(SecretModel.key)
        .join(UserModel)
        .add_columns(
            UserModel.username,
        )
        .paginate(page=page, per_page=per_page, error_out=False)
    )
    response_json = {
        "results": secrets.items,
        "pagination": {
            "count": len(secrets.items),
            "total": secrets.total,
            "pages": secrets.pages,
        },
    }
    return make_response(jsonify(response_json), 200)


def secret_create(body: Dict) -> Response:
    """Add secret."""
    secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
    return Response(
        json.dumps(SecretModelSchema().dump(secret_model)),
        status=201,
        mimetype="application/json",
    )


def secret_update(key: str, body: dict) -> Response:
    """Update secret."""
    SecretService().update_secret(key, body["value"], g.user.id)
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def secret_delete(key: str) -> Response:
    """Delete secret."""
    current_user = UserService.current_user()
    SecretService.delete_secret(key, current_user.id)
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

@@ -0,0 +1,49 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json

import flask.wrappers
import werkzeug
from flask import current_app
from flask import g
from flask import redirect
from flask import request
from flask.wrappers import Response

from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.service_task_service import ServiceTaskService


def service_task_list() -> flask.wrappers.Response:
    """Service_task_list."""
    available_connectors = ServiceTaskService.available_connectors()
    return Response(
        json.dumps(available_connectors), status=200, mimetype="application/json"
    )


def authentication_list() -> flask.wrappers.Response:
    """Authentication_list."""
    available_authentications = ServiceTaskService.authentication_list()
    response_json = {
        "results": available_authentications,
        "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"],
        "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
    }

    return Response(json.dumps(response_json), status=200, mimetype="application/json")


def authentication_callback(
    service: str,
    auth_method: str,
) -> werkzeug.wrappers.Response:
    """Authentication_callback."""
    verify_token(request.args.get("token"), force_run=True)
    response = request.args["response"]
    SecretService().update_secret(
        f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
    )
    return redirect(
        f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration"
    )

@@ -0,0 +1,563 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import os
import uuid
from typing import Any
from typing import Dict
from typing import Optional
from typing import TypedDict
from typing import Union

import flask.wrappers
import jinja2
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
from sqlalchemy import asc
from sqlalchemy import desc
from sqlalchemy import func
from sqlalchemy.orm import aliased

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
    _find_principal_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import (
    _find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService


class TaskDataSelectOption(TypedDict):
    """TaskDataSelectOption."""

    value: str
    label: str


class ReactJsonSchemaSelectOption(TypedDict):
    """ReactJsonSchemaSelectOption."""

    type: str
    title: str
    enum: list[str]


# TODO: see comment for before_request
# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
    """Task_list_my_tasks."""
    principal = _find_principal_or_raise()
    human_tasks = (
        HumanTaskModel.query.order_by(desc(HumanTaskModel.id))  # type: ignore
        .join(ProcessInstanceModel)
        .join(HumanTaskUserModel)
        .filter_by(user_id=principal.user_id)
        .filter(HumanTaskModel.completed == False)  # noqa: E712
        # just need this add_columns to add the process_model_identifier. Then add everything back that was removed.
        .add_columns(
            ProcessInstanceModel.process_model_identifier,
            ProcessInstanceModel.process_model_display_name,
            ProcessInstanceModel.status,
            HumanTaskModel.task_name,
            HumanTaskModel.task_title,
            HumanTaskModel.task_type,
            HumanTaskModel.task_status,
            HumanTaskModel.task_id,
            HumanTaskModel.id,
            HumanTaskModel.process_model_display_name,
            HumanTaskModel.process_instance_id,
        )
        .paginate(page=page, per_page=per_page, error_out=False)
    )
    tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items]

    response_json = {
        "results": tasks,
        "pagination": {
            "count": len(human_tasks.items),
            "total": human_tasks.total,
            "pages": human_tasks.pages,
        },
    }

    return make_response(jsonify(response_json), 200)


def task_list_for_my_open_processes(
    page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Task_list_for_my_open_processes."""
    return _get_tasks(page=page, per_page=per_page)


def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
    """Task_list_for_me."""
    return _get_tasks(
        processes_started_by_user=False,
        has_lane_assignment_id=False,
        page=page,
        per_page=per_page,
    )


def task_list_for_my_groups(
    user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Task_list_for_my_groups."""
    return _get_tasks(
        user_group_identifier=user_group_identifier,
        processes_started_by_user=False,
        page=page,
        per_page=per_page,
    )


def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
    """Task_show."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

    if process_instance.status == ProcessInstanceStatus.suspended.value:
        raise ApiError(
            error_code="error_suspended",
            message="The process instance is suspended",
            status_code=400,
        )

    process_model = _get_process_model(
        process_instance.process_model_identifier,
    )

    human_task = HumanTaskModel.query.filter_by(
        process_instance_id=process_instance_id, task_id=task_id
    ).first()
    if human_task is None:
        raise (
            ApiError(
                error_code="no_human_task",
                message=(
                    f"Cannot find a task to complete for task id '{task_id}' and"
                    f" process instance {process_instance_id}."
                ),
                status_code=500,
            )
        )

    form_schema_file_name = ""
    form_ui_schema_file_name = ""
    spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance)
    extensions = spiff_task.task_spec.extensions

    if "properties" in extensions:
        properties = extensions["properties"]
        if "formJsonSchemaFilename" in properties:
            form_schema_file_name = properties["formJsonSchemaFilename"]
        if "formUiSchemaFilename" in properties:
            form_ui_schema_file_name = properties["formUiSchemaFilename"]
    processor = ProcessInstanceProcessor(process_instance)
    task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
    task.data = spiff_task.data
    task.process_model_display_name = process_model.display_name
    task.process_model_identifier = process_model.id
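
    # If the task comes from a called process defined in a different process
    # model, resolve the model that actually contains the form files.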
    process_model_with_form = process_model
    refs = SpecFileService.get_references_for_process(process_model_with_form)
    all_processes = [i.identifier for i in refs]
    if task.process_identifier not in all_processes:
        bpmn_file_full_path = (
            ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
                task.process_identifier
            )
        )
        relative_path = os.path.relpath(
            bpmn_file_full_path, start=FileSystemService.root_path()
        )
        process_model_relative_path = os.path.dirname(relative_path)
        process_model_with_form = (
            ProcessModelService.get_process_model_from_relative_path(
                process_model_relative_path
            )
        )

    if task.type == "User Task":
        if not form_schema_file_name:
            raise (
                ApiError(
                    error_code="missing_form_file",
                    message=(
                        "Cannot find a form file for process_instance_id:"
                        f" {process_instance_id}, task_id: {task_id}"
                    ),
                    status_code=400,
                )
            )

        form_contents = _prepare_form_data(
            form_schema_file_name,
            task.data,
            process_model_with_form,
        )

        try:
            # form_contents is a str
            form_dict = json.loads(form_contents)
        except Exception as exception:
            raise (
                ApiError(
                    error_code="error_loading_form",
                    message=(
                        f"Could not load form schema from: {form_schema_file_name}."
                        f" Error was: {str(exception)}"
                    ),
                    status_code=400,
                )
            ) from exception

        if task.data:
            _update_form_schema_with_task_data_as_needed(form_dict, task.data)

        if form_contents:
            task.form_schema = form_dict

    if form_ui_schema_file_name:
        ui_form_contents = _prepare_form_data(
            form_ui_schema_file_name,
            task.data,
            process_model_with_form,
        )
        if ui_form_contents:
            task.form_ui_schema = ui_form_contents

    if task.properties and task.data and "instructionsForEndUser" in task.properties:
        if task.properties["instructionsForEndUser"]:
            task.properties["instructionsForEndUser"] = _render_jinja_template(
                task.properties["instructionsForEndUser"], task.data
            )
    return make_response(jsonify(task), 200)


def process_data_show(
    process_instance_id: int,
    process_data_identifier: str,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_data_show."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    processor = ProcessInstanceProcessor(process_instance)
    all_process_data = processor.get_data()
    process_data_value = None
    if process_data_identifier in all_process_data:
        process_data_value = all_process_data[process_data_identifier]

    return make_response(
        jsonify(
            {
                "process_data_identifier": process_data_identifier,
                "process_data_value": process_data_value,
            }
        ),
        200,
    )


def task_submit(
    process_instance_id: int,
    task_id: str,
    body: Dict[str, Any],
    terminate_loop: bool = False,
) -> flask.wrappers.Response:
    """Task_submit."""
    principal = _find_principal_or_raise()
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    if not process_instance.can_submit_task():
        raise ApiError(
            error_code="process_instance_not_runnable",
            message=(
                f"Process Instance ({process_instance.id}) has status "
                f"{process_instance.status} which does not allow tasks to be submitted."
            ),
            status_code=400,
        )

    processor = ProcessInstanceProcessor(process_instance)
    spiff_task = _get_spiff_task_from_process_instance(
        task_id, process_instance, processor=processor
    )
    AuthorizationService.assert_user_can_complete_spiff_task(
        process_instance.id, spiff_task, principal.user
    )

    if spiff_task.state != TaskState.READY:
        raise (
            ApiError(
                error_code="invalid_state",
                message="You may not update a task unless it is in the READY state.",
                status_code=400,
            )
        )

    if terminate_loop and spiff_task.is_looping():
        spiff_task.terminate_loop()

    human_task = HumanTaskModel.query.filter_by(
        process_instance_id=process_instance_id, task_id=task_id, completed=False
    ).first()
    if human_task is None:
        raise (
            ApiError(
                error_code="no_human_task",
                message=(
                    f"Cannot find a task to complete for task id '{task_id}' and"
                    f" process instance {process_instance_id}."
                ),
                status_code=500,
            )
        )

    ProcessInstanceService.complete_form_task(
        processor=processor,
        spiff_task=spiff_task,
        data=body,
        user=g.user,
        human_task=human_task,
    )

    # If we need to update all tasks, then get the next ready task and if it is a multi-instance with the same
    # task spec, complete that form as well.
    # if update_all:
    #     last_index = spiff_task.task_info()["mi_index"]
    #     next_task = processor.next_task()
    #     while next_task and next_task.task_info()["mi_index"] > last_index:
    #         __update_task(processor, next_task, form_data, user)
    #         last_index = next_task.task_info()["mi_index"]
    #         next_task = processor.next_task()
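
    # If this user has another ready task on the same process instance, return
    # it so the frontend can advance straight to it.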
    next_human_task_assigned_to_me = (
        HumanTaskModel.query.filter_by(
            process_instance_id=process_instance_id, completed=False
        )
        .order_by(asc(HumanTaskModel.id))  # type: ignore
        .join(HumanTaskUserModel)
        .filter_by(user_id=principal.user_id)
        .first()
    )
    if next_human_task_assigned_to_me:
        return make_response(
            jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200
        )

    return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")


def _get_tasks(
    processes_started_by_user: bool = True,
    has_lane_assignment_id: bool = True,
    page: int = 1,
    per_page: int = 100,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Get_tasks."""
    user_id = g.user.id

    # use distinct to ensure we only get one row per human task otherwise
    # we can get back multiple for the same human task row which throws off
    # pagination later on
    # https://stackoverflow.com/q/34582014/6090676
    human_tasks_query = (
        db.session.query(HumanTaskModel)
        .group_by(HumanTaskModel.id)  # type: ignore
        .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
        .join(ProcessInstanceModel)
        .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
        .filter(HumanTaskModel.completed == False)  # noqa: E712
    )
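
    # Join UserModel a second time under an alias to collect the potential
    # owners of each task, separate from the join on the process initiator.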
    assigned_user = aliased(UserModel)
    if processes_started_by_user:
        human_tasks_query = (
            human_tasks_query.filter(
                ProcessInstanceModel.process_initiator_id == user_id
            )
            .outerjoin(
                HumanTaskUserModel,
                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
            )
            .outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
        )
    else:
        human_tasks_query = human_tasks_query.filter(
            ProcessInstanceModel.process_initiator_id != user_id
        ).join(
            HumanTaskUserModel,
            and_(
                HumanTaskUserModel.user_id == user_id,
                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
            ),
        )
        if has_lane_assignment_id:
            if user_group_identifier:
                human_tasks_query = human_tasks_query.filter(
                    GroupModel.identifier == user_group_identifier
                )
            else:
                human_tasks_query = human_tasks_query.filter(
                    HumanTaskModel.lane_assignment_id.is_not(None)  # type: ignore
                )
        else:
            human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None))  # type: ignore

    human_tasks = (
        human_tasks_query.add_columns(
            ProcessInstanceModel.process_model_identifier,
            ProcessInstanceModel.status.label("process_instance_status"),  # type: ignore
            ProcessInstanceModel.updated_at_in_seconds,
            ProcessInstanceModel.created_at_in_seconds,
            UserModel.username.label("process_initiator_username"),
            GroupModel.identifier.label("assigned_user_group_identifier"),
            HumanTaskModel.task_name,
            HumanTaskModel.task_title,
            HumanTaskModel.process_model_display_name,
            HumanTaskModel.process_instance_id,
            func.group_concat(assigned_user.username.distinct()).label(
                "potential_owner_usernames"
            ),
        )
        .order_by(desc(HumanTaskModel.id))  # type: ignore
        .paginate(page=page, per_page=per_page, error_out=False)
    )

    response_json = {
        "results": human_tasks.items,
        "pagination": {
            "count": len(human_tasks.items),
            "total": human_tasks.total,
            "pages": human_tasks.pages,
        },
    }

    return make_response(jsonify(response_json), 200)


def _prepare_form_data(
    form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo
) -> str:
    """Prepare_form_data."""
    if task_data is None:
        return ""

    file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
    return _render_jinja_template(file_contents, task_data)


def _render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str:
    """Render_jinja_template."""
    jinja_environment = jinja2.Environment(
        autoescape=True, lstrip_blocks=True, trim_blocks=True
    )
    template = jinja_environment.from_string(unprocessed_template)
    return template.render(**data)


def _get_spiff_task_from_process_instance(
    task_id: str,
    process_instance: ProcessInstanceModel,
    processor: Union[ProcessInstanceProcessor, None] = None,
) -> SpiffTask:
    """Get_spiff_task_from_process_instance."""
    if processor is None:
        processor = ProcessInstanceProcessor(process_instance)
    task_uuid = uuid.UUID(task_id)
    spiff_task = processor.bpmn_process_instance.get_task(task_uuid)

    if spiff_task is None:
        raise (
            ApiError(
                error_code="empty_task",
                message="Processor failed to obtain task.",
                status_code=500,
            )
        )
    return spiff_task


# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
def _update_form_schema_with_task_data_as_needed(
    in_dict: dict, task_data: dict
) -> None:
    """Update_nested."""
    for k, value in in_dict.items():
        if "anyOf" == k:
            # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"]
            if isinstance(value, list):
                if len(value) == 1:
                    first_element_in_value_list = value[0]
                    if isinstance(first_element_in_value_list, str):
                        if first_element_in_value_list.startswith(
                            "options_from_task_data_var:"
                        ):
                            task_data_var = first_element_in_value_list.replace(
                                "options_from_task_data_var:", ""
                            )

                            if task_data_var not in task_data:
                                raise (
                                    ApiError(
                                        error_code="missing_task_data_var",
                                        message=(
                                            "Task data is missing variable:"
                                            f" {task_data_var}"
                                        ),
                                        status_code=500,
                                    )
                                )

                            select_options_from_task_data = task_data.get(task_data_var)
                            if isinstance(select_options_from_task_data, list):
                                if all(
                                    "value" in d and "label" in d
                                    for d in select_options_from_task_data
                                ):

                                    def map_function(
                                        task_data_select_option: TaskDataSelectOption,
                                    ) -> ReactJsonSchemaSelectOption:
                                        """Map_function."""
                                        return {
                                            "type": "string",
                                            "enum": [task_data_select_option["value"]],
                                            "title": task_data_select_option["label"],
                                        }

                                    options_for_react_json_schema_form = list(
                                        map(map_function, select_options_from_task_data)
                                    )

                                    in_dict[k] = options_for_react_json_schema_form
        elif isinstance(value, dict):
            _update_form_schema_with_task_data_as_needed(value, task_data)
        elif isinstance(value, list):
            for o in value:
                if isinstance(o, dict):
                    _update_form_schema_with_task_data_as_needed(o, task_data)

@@ -16,8 +16,9 @@ from flask_bpmn.api.api_error import ApiError
from werkzeug.wrappers import Response

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import AuthenticationService
from spiffworkflow_backend.services.authentication_service import (
    AuthenticationService,
    MissingAccessTokenError,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.user_service import UserService

@@ -66,16 +67,19 @@ def verify_token(
            user_model = get_user_from_decoded_internal_token(decoded_token)
        except Exception as e:
            current_app.logger.error(
                f"Exception in verify_token getting user from decoded internal token. {e}"
                "Exception in verify_token getting user from decoded"
                f" internal token. {e}"
            )
    elif "iss" in decoded_token.keys():
        try:
            if AuthenticationService.validate_id_token(token):
                user_info = decoded_token
        except ApiError as ae:  # API Error is only thrown if the token is outdated.
        except (
            ApiError
        ) as ae:  # API Error is only thrown if the token is outdated.
            # Try to refresh the token
            user = UserService.get_user_by_service_and_service_id(
                "open_id", decoded_token["sub"]
                decoded_token["iss"], decoded_token["sub"]
            )
            if user:
                refresh_token = AuthenticationService.get_refresh_token(user.id)

@@ -104,10 +108,12 @@ def verify_token(
            ) from e

    if (
        user_info is not None and "error" not in user_info
        user_info is not None
        and "error" not in user_info
        and "iss" in user_info
    ):  # not sure what to test yet
        user_model = (
            UserModel.query.filter(UserModel.service == "open_id")
            UserModel.query.filter(UserModel.service == user_info["iss"])
            .filter(UserModel.service_id == user_info["sub"])
            .first()
        )

@@ -268,10 +274,10 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
        code, "/v1.0/login_api_return"
    )
    access_token: str = auth_token_object["access_token"]
    assert access_token  # noqa: S101
    if access_token is None:
        raise MissingAccessTokenError("Cannot find the access token for the request")

    return access_token
    # return redirect("localhost:7000/v1.0/ui")
    # return {'uid': 'user_1'}


def logout(id_token: str, redirect_url: Optional[str]) -> Response:

@@ -292,7 +298,6 @@ def get_decoded_token(token: str) -> Optional[Dict]:
    try:
        decoded_token = jwt.decode(token, options={"verify_signature": False})
    except Exception as e:
        print(f"Exception in get_token_type: {e}")
        raise ApiError(
            error_code="invalid_token", message="Cannot decode token."
        ) from e

@@ -340,9 +345,5 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
    )
    if user:
        return user
    user = UserModel(
        username=service_id,
        service=service,
        service_id=service_id,
    )
    user = UserService.create_user(service_id, service, service_id)
    return user

@@ -26,6 +26,7 @@ user_blueprint = Blueprint("main", __name__)
#     user = UserService.create_user('internal', username)
#     return Response(json.dumps({"id": user.id}), status=201, mimetype=APPLICATION_JSON)


# def _create_user(username):
#     user = UserModel.query.filter_by(username=username).first()
#     if user is not None:

@@ -1,43 +0,0 @@
"""Get_env."""
from typing import Any

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.group import GroupNotFoundError
from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.user_service import UserService


class AddUserToGroup(Script):
    """AddUserToGroup."""

    def get_description(self) -> str:
        """Get_description."""
        return """Add a given user to a given group."""

    def run(
        self,
        script_attributes_context: ScriptAttributesContext,
        *args: Any,
        **kwargs: Any,
    ) -> Any:
        """Run."""
        username = args[0]
        group_identifier = args[1]
        user = UserModel.query.filter_by(username=username).first()
        if user is None:
            raise UserNotFoundError(
                f"Script 'add_user_to_group' could not find a user with username: {username}"
            )

        group = GroupModel.query.filter_by(identifier=group_identifier).first()
        if group is None:
            raise GroupNotFoundError(
                f"Script 'add_user_to_group' could not find group with identifier '{group_identifier}'."
            )

        UserService.add_user_to_group(user, group)

@@ -0,0 +1,63 @@
"""Delete_process_instances_with_criteria."""
from time import time
from typing import Any

from flask_bpmn.models.db import db
from sqlalchemy import or_

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.scripts.script import Script


class DeleteProcessInstancesWithCriteria(Script):
    """DeleteProcessInstancesWithCriteria."""

    def get_description(self) -> str:
        """Get_description."""
        return "Delete process instances that match the provided criteria."

    def run(
        self,
        script_attributes_context: ScriptAttributesContext,
        *args: Any,
        **kwargs: Any,
    ) -> Any:
        """Run."""
        criteria_list = args[0]

        delete_criteria = []
        delete_time = time()

        for criteria in criteria_list:
            delete_criteria.append(
                (ProcessInstanceModel.process_model_identifier == criteria["name"])
                & ProcessInstanceModel.status.in_(criteria["status"])  # type: ignore
                & (
                    ProcessInstanceModel.updated_at_in_seconds
                    < (delete_time - criteria["last_updated_delta"])
                )
            )
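
        # Delete at most 100 matching instances per invocation to bound the
        # size of the transaction.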
        results = (
            ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
        )
        rows_affected = len(results)

        if rows_affected > 0:
            ids_to_delete = list(map(lambda r: r.id, results))  # type: ignore

            step_details = SpiffStepDetailsModel.query.filter(
                SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete)  # type: ignore
            ).all()

            for deletion in step_details:
                db.session.delete(deletion)
            for deletion in results:
                db.session.delete(deletion)
            db.session.commit()

        return rows_affected

@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class FactService(Script):
    """FactService."""

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """We have deemed this function safe to run without elevated permissions."""
        return False

    def get_description(self) -> str:
        """Get_description."""
        return """Just your basic class that can pull in data from a few api endpoints and

@@ -30,7 +35,10 @@ class FactService(Script):
        if fact == "cat":
            details = "The cat in the hat"  # self.get_cat()
        elif fact == "norris":
            details = "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants."
            details = (
                "Chuck Norris doesn’t read books. He stares them down until he gets the"
                " information he wants."
            )
        elif fact == "buzzword":
            details = "Move the Needle."  # self.get_buzzword()
        else:

@@ -0,0 +1,71 @@
"""Get_env."""
from collections import OrderedDict
from typing import Any

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
from spiffworkflow_backend.scripts.script import Script


class GetAllPermissions(Script):
    """GetAllPermissions."""

    def get_description(self) -> str:
        """Get_description."""
        return """Get all permissions currently in the system."""

    def run(
        self,
        script_attributes_context: ScriptAttributesContext,
        *args: Any,
        **kwargs: Any,
    ) -> Any:
        """Run."""
        permission_assignments = (
            PermissionAssignmentModel.query.join(
                PrincipalModel,
                PrincipalModel.id == PermissionAssignmentModel.principal_id,
            )
            .join(GroupModel, GroupModel.id == PrincipalModel.group_id)
            .join(
                PermissionTargetModel,
                PermissionTargetModel.id
                == PermissionAssignmentModel.permission_target_id,
            )
            .add_columns(
                PermissionAssignmentModel.permission,
                PermissionTargetModel.uri,
                GroupModel.identifier.label("group_identifier"),
            )
        )
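
        # Collapse the per-permission rows into one entry per (group, uri) pair.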
        permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict()
        for pa in permission_assignments:
            permissions.setdefault((pa.group_identifier, pa.uri), []).append(
                pa.permission
            )

        def replace_suffix(string: str, old: str, new: str) -> str:
            """Replace_suffix."""
            if string.endswith(old):
                return string[: -len(old)] + new
            return string

        # sort list of strings based on a specific order
        def sort_by_order(string_list: list, order: list) -> list:
            """Sort_by_order."""
            return sorted(string_list, key=lambda x: order.index(x))

        return [
            {
                "group_identifier": k[0],
                "uri": replace_suffix(k[1], "%", "*"),
                "permissions": sort_by_order(v, ["create", "read", "update", "delete"]),
            }
            for k, v in permissions.items()
        ]

@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetCurrentUser(Script):
    """GetCurrentUser."""

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """We have deemed this function safe to run without elevated permissions."""
        return False

    def get_description(self) -> str:
        """Get_description."""
        return """Return the current user."""

@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetEnv(Script):
    """GetEnv."""

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """We have deemed this function safe to run without elevated permissions."""
        return False

    def get_description(self) -> str:
        """Get_description."""
        return """Returns the current environment - ie testing, staging, production."""

@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetFrontendUrl(Script):
    """GetFrontendUrl."""

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """We have deemed this function safe to run without elevated permissions."""
        return False

    def get_description(self) -> str:
        """Get_description."""
        return """Return the url to the frontend."""

@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetGroupMembers(Script):
    """GetGroupMembers."""

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """We have deemed this function safe to run without elevated permissions."""
        return False

    def get_description(self) -> str:
        """Get_description."""
        return """Return the list of usernames of the users in the given group."""

@@ -27,7 +32,8 @@ class GetGroupMembers(Script):
        group = GroupModel.query.filter_by(identifier=group_identifier).first()
        if group is None:
            raise GroupNotFoundError(
                f"Script 'get_group_members' could not find group with identifier '{group_identifier}'."
                "Script 'get_group_members' could not find group with identifier"
                f" '{group_identifier}'."
            )

        usernames = [u.username for u in group.users]

@@ -14,6 +14,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetLocaltime(Script):
    """GetLocaltime."""

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """We have deemed this function safe to run without elevated permissions."""
        return False

    def get_description(self) -> str:
        """Get_description."""
        return """Converts a Datetime object into a Datetime object for a specific timezone.

@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetProcessInfo(Script):
    """GetProcessInfo."""

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """We have deemed this function safe to run without elevated permissions."""
        return False

    def get_description(self) -> str:
        """Get_description."""
        return """Returns a dictionary of information about the currently running process."""

@@ -23,5 +28,7 @@ class GetProcessInfo(Script):
        """Run."""
        return {
            "process_instance_id": script_attributes_context.process_instance_id,
            "process_model_identifier": script_attributes_context.process_model_identifier,
            "process_model_identifier": (
                script_attributes_context.process_model_identifier
            ),
        }

@@ -0,0 +1,39 @@
"""Get_env."""
from typing import Any

from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.authorization_service import AuthorizationService


class RefreshPermissions(Script):
    """RefreshPermissions."""

    def get_description(self) -> str:
        """Get_description."""
        return """Add permissions using a dict.

        group_info: [
            {
                'name': group_identifier,
                'users': array_of_users,
                'permissions': [
                    {
                        'actions': array_of_actions - create, read, etc,
                        'uri': target_uri
                    }
                ]
            }
        ]
        """

    def run(
        self,
        script_attributes_context: ScriptAttributesContext,
        *args: Any,
        **kwargs: Any,
    ) -> Any:
        """Run."""
        group_info = args[0]
        AuthorizationService.refresh_permissions(group_info)

@@ -10,9 +10,12 @@ from typing import Callable

from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceNotFoundError
from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService

# Generally speaking, having some global in a flask app is TERRIBLE.
# This is here, because after loading the application this will never change under

@@ -20,6 +23,10 @@ from spiffworkflow_backend.models.script_attributes_context import (
SCRIPT_SUB_CLASSES = None


class ScriptUnauthorizedForUserError(Exception):
    """ScriptUnauthorizedForUserError."""


class Script:
    """Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks."""

@@ -43,6 +50,15 @@ class Script:
            + "does not properly implement the run function.",
        )

    @staticmethod
    def requires_privileged_permissions() -> bool:
        """It seems safer to default to True and make safe functions opt in for any user to run them.

        To give access to script for a given user, add a 'create' permission with following target-uri:
        '/can-run-privileged-script/{script_name}'
        """
        return True

    @staticmethod
    def generate_augmented_list(
        script_attributes_context: ScriptAttributesContext,

@@ -71,18 +87,52 @@ class Script:
            that we created.
            """
            instance = subclass()
            return lambda *ar, **kw: subclass.run(
                instance,
                script_attributes_context,
                *ar,
                **kw,
            )

            def check_script_permission() -> None:
                """Check_script_permission."""
                if subclass.requires_privileged_permissions():
                    script_function_name = get_script_function_name(subclass)
                    uri = f"/can-run-privileged-script/{script_function_name}"
                    process_instance = ProcessInstanceModel.query.filter_by(
                        id=script_attributes_context.process_instance_id
                    ).first()
                    if process_instance is None:
                        raise ProcessInstanceNotFoundError(
                            "Could not find a process instance with id"
                            f" '{script_attributes_context.process_instance_id}' when"
                            f" running script '{script_function_name}'"
                        )
                    user = process_instance.process_initiator
                    has_permission = AuthorizationService.user_has_permission(
                        user=user, permission="create", target_uri=uri
                    )
                    if not has_permission:
                        raise ScriptUnauthorizedForUserError(
                            f"User {user.username} does not have access to run"
                            f" privileged script '{script_function_name}'"
                        )

            def run_script_if_allowed(*ar: Any, **kw: Any) -> Any:
                """Run_script_if_allowed."""
                check_script_permission()
                return subclass.run(
                    instance,
                    script_attributes_context,
                    *ar,
                    **kw,
                )

            return run_script_if_allowed

        def get_script_function_name(subclass: type[Script]) -> str:
            """Get_script_function_name."""
            return subclass.__module__.split(".")[-1]
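
        # Build the name -> callable map exposed to script tasks; each entry is
        # wrapped so the permission check runs before the script body.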
        execlist = {}
        subclasses = Script.get_all_subclasses()
        for x in range(len(subclasses)):
            subclass = subclasses[x]
            execlist[subclass.__module__.split(".")[-1]] = make_closure(
            execlist[get_script_function_name(subclass)] = make_closure(
                subclass, script_attributes_context=script_attributes_context
            )
        return execlist

@@ -101,7 +151,7 @@ class Script:
        """_get_all_subclasses."""
        # hackish mess to make sure we have all the modules loaded for the scripts
        pkg_dir = os.path.dirname(__file__)
        for (_module_loader, name, _ispkg) in pkgutil.iter_modules([pkg_dir]):
        for _module_loader, name, _ispkg in pkgutil.iter_modules([pkg_dir]):
            importlib.import_module("." + name, __package__)

        """Returns a list of all classes that extend this class."""

@@ -29,7 +29,6 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
    # suspended - 6 hours ago
    process_instances = []
    for i in range(len(statuses)):

        process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
            test_process_model_id, user
        )

@@ -16,6 +16,10 @@ from werkzeug.wrappers import Response
from spiffworkflow_backend.models.refresh_token import RefreshTokenModel


class MissingAccessTokenError(Exception):
    """MissingAccessTokenError."""


class AuthenticationProviderTypes(enum.Enum):
    """AuthenticationServiceProviders."""

@@ -89,7 +93,7 @@ class AuthenticationService:
            + f"?state={state}&"
            + "response_type=code&"
            + f"client_id={self.client_id()}&"
            + "scope=openid&"
            + "scope=openid profile email&"
            + f"redirect_uri={return_redirect_url}"
        )
        return login_redirect_url

@@ -1,7 +1,14 @@
"""Authorization_service."""
import inspect
import re
from dataclasses import dataclass
from hashlib import sha256
from hmac import compare_digest
from hmac import HMAC
from typing import Any
from typing import Optional
from typing import Set
from typing import TypedDict
from typing import Union

import jwt

@@ -16,8 +23,9 @@ from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from sqlalchemy import or_
from sqlalchemy import text

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import MissingPrincipalError
@@ -34,23 +42,79 @@ class PermissionsFileNotSetError(Exception):
    """PermissionsFileNotSetError."""


class ActiveTaskNotFoundError(Exception):
    """ActiveTaskNotFoundError."""
class HumanTaskNotFoundError(Exception):
    """HumanTaskNotFoundError."""


class UserDoesNotHaveAccessToTaskError(Exception):
    """UserDoesNotHaveAccessToTaskError."""


class InvalidPermissionError(Exception):
    """InvalidPermissionError."""


@dataclass
class PermissionToAssign:
    """PermissionToAssign."""

    permission: str
    target_uri: str


# the relevant permissions are the only API methods that are currently available for each path prefix.
# if we add further API methods, we'll need to evaluate whether they should be added here.
PATH_SEGMENTS_FOR_PERMISSION_ALL = [
    {"path": "/logs", "relevant_permissions": ["read"]},
    {
        "path": "/process-instances",
        "relevant_permissions": ["create", "read", "delete"],
    },
    {"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
    {"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
    {"path": "/task-data", "relevant_permissions": ["read", "update"]},
    {"path": "/process-data", "relevant_permissions": ["read"]},
]


class DesiredPermissionDict(TypedDict):
    """DesiredPermissionDict."""

    group_identifiers: Set[str]
    permission_assignments: list[PermissionAssignmentModel]


class AuthorizationService:
    """Determine whether a user has permission to perform their request."""

    # https://stackoverflow.com/a/71320673/6090676
    @classmethod
    def verify_sha256_token(cls, auth_header: Optional[str]) -> None:
        """Verify_sha256_token."""
        if auth_header is None:
            raise ApiError(
                error_code="unauthorized",
                message="",
                status_code=403,
            )

        received_sign = auth_header.split("sha256=")[-1].strip()
        secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode()
        expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest()
        if not compare_digest(received_sign, expected_sign):
            raise ApiError(
                error_code="unauthorized",
                message="",
                status_code=403,
            )

    @classmethod
    def has_permission(
        cls, principals: list[PrincipalModel], permission: str, target_uri: str
    ) -> bool:
        """Has_permission."""
        principal_ids = [p.id for p in principals]
        target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)

        permission_assignments = (
            PermissionAssignmentModel.query.filter(
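verify_sha256_token follows GitHub's webhook signing scheme: the raw request body is signed with HMAC-SHA256 and compared in constant time. A minimal standalone sketch of the same check (the secret and body here are illustrative):

from hashlib import sha256
from hmac import HMAC, compare_digest

def signature_is_valid(secret: bytes, body: bytes, header_value: str) -> bool:
    # header_value looks like "sha256=<hexdigest>", per GitHub's X-Hub-Signature-256
    received = header_value.split("sha256=")[-1].strip()
    expected = HMAC(key=secret, msg=body, digestmod=sha256).hexdigest()
    return compare_digest(received, expected)  # constant-time comparison

body = b'{"ref": "refs/heads/main"}'
header = "sha256=" + HMAC(key=b"my-secret", msg=body, digestmod=sha256).hexdigest()
print(signature_is_valid(b"my-secret", body, header))  # True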
@@ -60,10 +124,13 @@ class AuthorizationService:
            .join(PermissionTargetModel)
            .filter(
                or_(
                    text(f"'{target_uri}' LIKE permission_target.uri"),
                    text(f"'{target_uri_normalized}' LIKE permission_target.uri"),
                    # to check for exact matches as well
                    # see test_user_can_access_base_path_when_given_wildcard_permission unit test
                    text(f"'{target_uri}' = replace(permission_target.uri, '/%', '')"),
                    text(
                        f"'{target_uri_normalized}' ="
                        " replace(replace(permission_target.uri, '/%', ''), ':%', '')"
                    ),
                )
            )
            .all()
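Permission targets store `*` wildcards as SQL `%`, so matching happens via LIKE, and a second clause strips trailing `/%` or `:%` so a wildcard grant also covers its own base path. A rough pure-Python approximation of that matching rule, for illustration only (not the actual SQL the service runs):

def uri_matches_target(request_uri: str, target_uri: str) -> bool:
    """Approximate the LIKE + base-path check used for permission targets."""
    if target_uri.endswith("/%") or target_uri.endswith(":%"):
        base = target_uri[:-2]
        # the wildcard covers the base path itself and anything underneath it
        return request_uri == base or request_uri.startswith(target_uri[:-1])
    return request_uri == target_uri

print(uri_matches_target("/process-instances", "/process-instances/%"))             # True
print(uri_matches_target("/process-instances/my-group/x", "/process-instances/%"))  # True
print(uri_matches_target("/logs/other", "/process-instances/%"))                    # False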
@@ -103,17 +170,15 @@ class AuthorizationService:
        return cls.has_permission(principals, permission, target_uri)

    @classmethod
    def delete_all_permissions_and_recreate(cls) -> None:
        """Delete_all_permissions_and_recreate."""
    def delete_all_permissions(cls) -> None:
        """Delete_all_permissions_and_recreate. EXCEPT For permissions for the current user?"""
        for model in [PermissionAssignmentModel, PermissionTargetModel]:
            db.session.query(model).delete()

        # cascading to principals doesn't seem to work when attempting to delete all so do it like this instead
        for group in GroupModel.query.all():
            db.session.delete(group)

        db.session.commit()
        cls.import_permissions_from_yaml_file()

    @classmethod
    def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None:
@@ -131,12 +196,13 @@ class AuthorizationService:
    @classmethod
    def import_permissions_from_yaml_file(
        cls, raise_if_missing_user: bool = False
    ) -> None:
    ) -> DesiredPermissionDict:
        """Import_permissions_from_yaml_file."""
        if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
            raise (
                PermissionsFileNotSetError(
                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions"
                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in"
                    " order to import permissions"
                )
            )

@@ -145,13 +211,16 @@ class AuthorizationService:
            permission_configs = yaml.safe_load(file)

        default_group = None
        unique_user_group_identifiers: Set[str] = set()
        if "default_group" in permission_configs:
            default_group_identifier = permission_configs["default_group"]
            default_group = GroupService.find_or_create_group(default_group_identifier)
            unique_user_group_identifiers.add(default_group_identifier)

        if "groups" in permission_configs:
            for group_identifier, group_config in permission_configs["groups"].items():
                group = GroupService.find_or_create_group(group_identifier)
                unique_user_group_identifiers.add(group_identifier)
                for username in group_config["users"]:
                    user = UserModel.query.filter_by(username=username).first()
                    if user is None:
@@ -164,26 +233,25 @@ class AuthorizationService:
                        continue
                    cls.associate_user_with_group(user, group)

        permission_assignments = []
        if "permissions" in permission_configs:
            for _permission_identifier, permission_config in permission_configs[
                "permissions"
            ].items():
                uri = permission_config["uri"]
                uri_with_percent = re.sub(r"\*", "%", uri)
                permission_target = PermissionTargetModel.query.filter_by(
                    uri=uri_with_percent
                ).first()
                if permission_target is None:
                    permission_target = PermissionTargetModel(uri=uri_with_percent)
                    db.session.add(permission_target)
                    db.session.commit()
                permission_target = cls.find_or_create_permission_target(uri)

                for allowed_permission in permission_config["allowed_permissions"]:
                    if "groups" in permission_config:
                        for group_identifier in permission_config["groups"]:
                            group = GroupService.find_or_create_group(group_identifier)
                            cls.create_permission_for_principal(
                                group.principal, permission_target, allowed_permission
                            unique_user_group_identifiers.add(group_identifier)
                            permission_assignments.append(
                                cls.create_permission_for_principal(
                                    group.principal,
                                    permission_target,
                                    allowed_permission,
                                )
                            )
                    if "users" in permission_config:
                        for username in permission_config["users"]:
@@ -194,14 +262,35 @@ class AuthorizationService:
                                .filter(UserModel.username == username)
                                .first()
                            )
                            cls.create_permission_for_principal(
                                principal, permission_target, allowed_permission
                            permission_assignments.append(
                                cls.create_permission_for_principal(
                                    principal, permission_target, allowed_permission
                                )
                            )

        if default_group is not None:
            for user in UserModel.query.all():
                cls.associate_user_with_group(user, default_group)

        return {
            "group_identifiers": unique_user_group_identifiers,
            "permission_assignments": permission_assignments,
        }

    @classmethod
    def find_or_create_permission_target(cls, uri: str) -> PermissionTargetModel:
        """Find_or_create_permission_target."""
        uri_with_percent = re.sub(r"\*", "%", uri)
        target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
        permission_target: Optional[PermissionTargetModel] = (
            PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
        )
        if permission_target is None:
            permission_target = PermissionTargetModel(uri=target_uri_normalized)
            db.session.add(permission_target)
            db.session.commit()
        return permission_target

    @classmethod
    def create_permission_for_principal(
        cls,
@@ -210,13 +299,13 @@ class AuthorizationService:
        permission: str,
    ) -> PermissionAssignmentModel:
        """Create_permission_for_principal."""
        permission_assignment: Optional[
            PermissionAssignmentModel
        ] = PermissionAssignmentModel.query.filter_by(
            principal_id=principal.id,
            permission_target_id=permission_target.id,
            permission=permission,
        ).first()
        permission_assignment: Optional[PermissionAssignmentModel] = (
            PermissionAssignmentModel.query.filter_by(
                principal_id=principal.id,
                permission_target_id=permission_target.id,
                permission=permission,
            ).first()
        )
        if permission_assignment is None:
            permission_assignment = PermissionAssignmentModel(
                principal_id=principal.id,
@@ -316,7 +405,10 @@ class AuthorizationService:

            raise ApiError(
                error_code="unauthorized",
                message=f"User {g.user.username} is not authorized to perform requested action: {permission_string} - {request.path}",
                message=(
                    f"User {g.user.username} is not authorized to perform requested action:"
                    f" {permission_string} - {request.path}"
                ),
                status_code=403,
            )

@@ -395,7 +487,10 @@ class AuthorizationService:
        except jwt.InvalidTokenError as exception:
            raise ApiError(
                "token_invalid",
                "The Authentication token you provided is invalid. You need a new token. ",
                (
                    "The Authentication token you provided is invalid. You need a new"
                    " token. "
                ),
            ) from exception

    @staticmethod
@@ -405,53 +500,69 @@ class AuthorizationService:
        user: UserModel,
    ) -> bool:
        """Assert_user_can_complete_spiff_task."""
        active_task = ActiveTaskModel.query.filter_by(
        human_task = HumanTaskModel.query.filter_by(
            task_name=spiff_task.task_spec.name,
            process_instance_id=process_instance_id,
        ).first()
        if active_task is None:
            raise ActiveTaskNotFoundError(
                f"Could find an active task with task name '{spiff_task.task_spec.name}'"
        if human_task is None:
            raise HumanTaskNotFoundError(
                f"Could find an human task with task name '{spiff_task.task_spec.name}'"
                f" for process instance '{process_instance_id}'"
            )

        if user not in active_task.potential_owners:
        if user not in human_task.potential_owners:
            raise UserDoesNotHaveAccessToTaskError(
                f"User {user.username} does not have access to update task'{spiff_task.task_spec.name}'"
                f" for process instance '{process_instance_id}'"
                f"User {user.username} does not have access to update"
                f" task'{spiff_task.task_spec.name}' for process instance"
                f" '{process_instance_id}'"
            )
        return True

    @classmethod
    def create_user_from_sign_in(cls, user_info: dict) -> UserModel:
        """Create_user_from_sign_in."""
        """Name, family_name, given_name, middle_name, nickname, preferred_username,"""
        """Profile, picture, website, gender, birthdate, zoneinfo, locale, and updated_at. """
        """Email."""
        is_new_user = False
        user_model = (
            UserModel.query.filter(UserModel.service == "open_id")
            UserModel.query.filter(UserModel.service == user_info["iss"])
            .filter(UserModel.service_id == user_info["sub"])
            .first()
        )
        email = display_name = username = ""
        if "email" in user_info:
            username = user_info["email"]
            email = user_info["email"]
        else:  # we fall back to the sub, which may be very ugly.
            username = user_info["sub"] + "@" + user_info["iss"]

        if "preferred_username" in user_info:
            display_name = user_info["preferred_username"]
        elif "nickname" in user_info:
            display_name = user_info["nickname"]
        elif "name" in user_info:
            display_name = user_info["name"]

        if user_model is None:
            current_app.logger.debug("create_user in login_return")
            is_new_user = True
            name = username = email = ""
            if "name" in user_info:
                name = user_info["name"]
            if "username" in user_info:
                username = user_info["username"]
            elif "preferred_username" in user_info:
                username = user_info["preferred_username"]
            if "email" in user_info:
                email = user_info["email"]
            user_model = UserService().create_user(
                service="open_id",
                service_id=user_info["sub"],
                name=name,
                username=username,
                service=user_info["iss"],
                service_id=user_info["sub"],
                email=email,
                display_name=display_name,
            )

        else:
            # Update with the latest information
            user_model.username = username
            user_model.email = email
            user_model.display_name = display_name
            user_model.service = user_info["iss"]
            user_model.service_id = user_info["sub"]

        # this may eventually get too slow.
        # when it does, be careful about backgrounding, because
        # the user will immediately need permissions to use the site.
@@ -461,11 +572,229 @@ class AuthorizationService:
        cls.import_permissions_from_yaml_file()

        if is_new_user:
            UserService.add_user_to_active_tasks_if_appropriate(user_model)
            UserService.add_user_to_human_tasks_if_appropriate(user_model)

        # this cannot be None so ignore mypy
        return user_model  # type: ignore

    @classmethod
    def get_permissions_to_assign(
        cls,
        permission_set: str,
        process_related_path_segment: str,
        target_uris: list[str],
    ) -> list[PermissionToAssign]:
        """Get_permissions_to_assign."""
        permissions = permission_set.split(",")
        if permission_set == "all":
            permissions = ["create", "read", "update", "delete"]

        permissions_to_assign: list[PermissionToAssign] = []

        # we were thinking that if you can start an instance, you ought to be able to view your own instances.
        if permission_set == "start":
            target_uri = f"/process-instances/{process_related_path_segment}"
            permissions_to_assign.append(
                PermissionToAssign(permission="create", target_uri=target_uri)
            )
            target_uri = f"/process-instances/for-me/{process_related_path_segment}"
            permissions_to_assign.append(
                PermissionToAssign(permission="read", target_uri=target_uri)
            )

        else:
            if permission_set == "all":
                for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL:
                    target_uri = (
                        f"{path_segment_dict['path']}/{process_related_path_segment}"
                    )
                    relevant_permissions = path_segment_dict["relevant_permissions"]
                    for permission in relevant_permissions:
                        permissions_to_assign.append(
                            PermissionToAssign(
                                permission=permission, target_uri=target_uri
                            )
                        )

            for target_uri in target_uris:
                for permission in permissions:
                    permissions_to_assign.append(
                        PermissionToAssign(permission=permission, target_uri=target_uri)
                    )

        return permissions_to_assign

    @classmethod
    def explode_permissions(
        cls, permission_set: str, target: str
    ) -> list[PermissionToAssign]:
        """Explodes given permissions to and returns list of PermissionToAssign objects.

        These can be used to then iterate through and inserted into the database.
        Target Macros:
            ALL
                * gives access to ALL api endpoints - useful to give admin-like permissions
            PG:[process_group_identifier]
                * affects given process-group and all sub process-groups and process-models
            PM:[process_model_identifier]
                * affects given process-model
            BASIC
                * Basic access to complete tasks and use the site

        Permission Macros:
            all
                * create, read, update, delete
            start
                * create process-instances (aka instantiate or start a process-model)
                * only works with PG and PM target macros
        """
        permissions_to_assign: list[PermissionToAssign] = []
        permissions = permission_set.split(",")
        if permission_set == "all":
            permissions = ["create", "read", "update", "delete"]

        if target.startswith("PG:"):
            process_group_identifier = (
                target.removeprefix("PG:").replace("/", ":").removeprefix(":")
            )
            process_related_path_segment = f"{process_group_identifier}:*"
            if process_group_identifier == "ALL":
                process_related_path_segment = "*"
            target_uris = [
                f"/process-groups/{process_related_path_segment}",
                f"/process-models/{process_related_path_segment}",
            ]
            permissions_to_assign = (
                permissions_to_assign
                + cls.get_permissions_to_assign(
                    permission_set, process_related_path_segment, target_uris
                )
            )

        elif target.startswith("PM:"):
            process_model_identifier = (
                target.removeprefix("PM:").replace("/", ":").removeprefix(":")
            )
            process_related_path_segment = f"{process_model_identifier}/*"

            if process_model_identifier == "ALL":
                process_related_path_segment = "*"

            target_uris = [f"/process-models/{process_related_path_segment}"]
            permissions_to_assign = (
                permissions_to_assign
                + cls.get_permissions_to_assign(
                    permission_set, process_related_path_segment, target_uris
                )
            )

        elif permission_set == "start":
            raise InvalidPermissionError(
                "Permission 'start' is only available for macros PM and PG."
            )

        elif target.startswith("BASIC"):
            permissions_to_assign.append(
                PermissionToAssign(
                    permission="read", target_uri="/process-instances/for-me"
                )
            )
            permissions_to_assign.append(
                PermissionToAssign(permission="read", target_uri="/processes")
            )
            permissions_to_assign.append(
                PermissionToAssign(permission="read", target_uri="/service-tasks")
            )
            permissions_to_assign.append(
                PermissionToAssign(
                    permission="read", target_uri="/user-groups/for-current-user"
                )
            )

            for permission in ["create", "read", "update", "delete"]:
                permissions_to_assign.append(
                    PermissionToAssign(
                        permission=permission, target_uri="/process-instances/reports/*"
                    )
                )
                permissions_to_assign.append(
                    PermissionToAssign(permission=permission, target_uri="/tasks/*")
                )
        elif target == "ALL":
            for permission in permissions:
                permissions_to_assign.append(
                    PermissionToAssign(permission=permission, target_uri="/*")
                )
        elif target.startswith("/"):
            for permission in permissions:
                permissions_to_assign.append(
                    PermissionToAssign(permission=permission, target_uri=target)
                )
        else:
            raise InvalidPermissionError(
                f"Target uri '{target}' with permission set '{permission_set}' is"
                " invalid. The target uri must either be a macro of PG, PM, BASIC, or"
                " ALL or an api uri."
            )

        return permissions_to_assign
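A quick standalone sketch of how the PG: target plus the 'start' permission macro expand, mimicking the branch above (the group identifier is made up; output shapes follow the docstring, not verified against the test suite):

from dataclasses import dataclass

@dataclass
class Perm:
    permission: str
    target_uri: str

def explode_start_for_process_group(target: str) -> list[Perm]:
    """Mimic the PG: + 'start' branch: create on instances, read on for-me."""
    group = target.removeprefix("PG:").replace("/", ":").removeprefix(":")
    segment = "*" if group == "ALL" else f"{group}:*"
    return [
        Perm("create", f"/process-instances/{segment}"),
        Perm("read", f"/process-instances/for-me/{segment}"),
    ]

print(explode_start_for_process_group("PG:examples/demos"))
# [Perm(permission='create', target_uri='/process-instances/examples:demos:*'),
#  Perm(permission='read', target_uri='/process-instances/for-me/examples:demos:*')]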
    @classmethod
    def add_permission_from_uri_or_macro(
        cls, group_identifier: str, permission: str, target: str
    ) -> list[PermissionAssignmentModel]:
        """Add_permission_from_uri_or_macro."""
        group = GroupService.find_or_create_group(group_identifier)
        permissions_to_assign = cls.explode_permissions(permission, target)
        permission_assignments = []
        for permission_to_assign in permissions_to_assign:
            permission_target = cls.find_or_create_permission_target(
                permission_to_assign.target_uri
            )
            permission_assignments.append(
                cls.create_permission_for_principal(
                    group.principal, permission_target, permission_to_assign.permission
                )
            )
        return permission_assignments

    @classmethod
    def refresh_permissions(cls, group_info: list[dict[str, Any]]) -> None:
        """Adds new permission assignments and deletes old ones."""
        initial_permission_assignments = PermissionAssignmentModel.query.all()
        result = cls.import_permissions_from_yaml_file()
        desired_permission_assignments = result["permission_assignments"]
        desired_group_identifiers = result["group_identifiers"]

        for group in group_info:
            group_identifier = group["name"]
            for username in group["users"]:
                GroupService.add_user_to_group_or_add_to_waiting(
                    username, group_identifier
                )
            desired_group_identifiers.add(group_identifier)
            for permission in group["permissions"]:
                for crud_op in permission["actions"]:
                    desired_permission_assignments.extend(
                        cls.add_permission_from_uri_or_macro(
                            group_identifier=group_identifier,
                            target=permission["uri"],
                            permission=crud_op,
                        )
                    )
                    desired_group_identifiers.add(group_identifier)

        for ipa in initial_permission_assignments:
            if ipa not in desired_permission_assignments:
                db.session.delete(ipa)

        groups_to_delete = GroupModel.query.filter(
            GroupModel.identifier.not_in(desired_group_identifiers)
        ).all()
        for gtd in groups_to_delete:
            db.session.delete(gtd)
        db.session.commit()


class KeycloakAuthorization:
    """Interface with Keycloak server."""
@@ -40,10 +40,9 @@ class FileSystemService:
    @staticmethod
    def root_path() -> str:
        """Root_path."""
        # fixme: allow absolute files
        dir_name = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
        app_root = current_app.root_path
        return os.path.abspath(os.path.join(app_root, "..", dir_name))
        # ensure this is a string - thanks mypy...
        return os.path.abspath(os.path.join(dir_name, ""))

    @staticmethod
    def id_string_to_relative_path(id_string: str) -> str:
@@ -1,54 +1,89 @@
"""Git_service."""
import os
import shutil
import subprocess  # noqa we need the subprocess module to safely run the git commands
import uuid
from typing import Optional
from typing import Union

from flask import current_app
from flask import g

from spiffworkflow_backend.config import ConfigurationError
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.file_system_service import FileSystemService


class MissingGitConfigsError(Exception):
    """MissingGitConfigsError."""


class InvalidGitWebhookBodyError(Exception):
    """InvalidGitWebhookBodyError."""


class GitCloneUrlMismatchError(Exception):
    """GitCloneUrlMismatchError."""


class GitCommandError(Exception):
    """GitCommandError."""


# TOOD: check for the existence of git and configs on bootup if publishing is enabled
class GitService:
    """GitService."""

    @staticmethod
    def get_current_revision() -> str:
    @classmethod
    def get_current_revision(cls) -> str:
        """Get_current_revision."""
        bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
        # The value includes a carriage return character at the end, so we don't grab the last character
        current_git_revision = os.popen(  # noqa: S605
            f"cd {bpmn_spec_absolute_dir} && git rev-parse --short HEAD"
        ).read()[
            :-1
        ]  # noqa: S605
        return current_git_revision
        with FileSystemService.cd(bpmn_spec_absolute_dir):
            return cls.run_shell_command_to_get_stdout(
                ["git", "rev-parse", "--short", "HEAD"]
            )

    @staticmethod
    @classmethod
    def get_instance_file_contents_for_revision(
        process_model: ProcessModelInfo, revision: str
    ) -> bytes:
        cls,
        process_model: ProcessModelInfo,
        revision: str,
        file_name: Optional[str] = None,
    ) -> str:
        """Get_instance_file_contents_for_revision."""
        bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model
        )
        shell_cd_command = f"cd {bpmn_spec_absolute_dir}"
        shell_git_command = f"git show {revision}:{process_model_relative_path}/{process_model.primary_file_name}"
        shell_command = f"{shell_cd_command} && {shell_git_command}"
        # git show 78ae5eb:category_number_one/script-task/script-task.bpmn
        file_contents: str = os.popen(shell_command).read()[:-1]  # noqa: S605
        assert file_contents  # noqa: S101
        return file_contents.encode("utf-8")
        file_name_to_use = file_name
        if file_name_to_use is None:
            file_name_to_use = process_model.primary_file_name
        with FileSystemService.cd(bpmn_spec_absolute_dir):
            shell_command = [
                "git",
                "show",
                f"{revision}:{process_model_relative_path}/{file_name_to_use}",
            ]
            return cls.run_shell_command_to_get_stdout(shell_command)

    @staticmethod
    def commit(message: str, repo_path: Optional[str] = None) -> str:
    @classmethod
    def commit(
        cls,
        message: str,
        repo_path: Optional[str] = None,
        branch_name: Optional[str] = None,
    ) -> str:
        """Commit."""
        cls.check_for_basic_configs()
        branch_name_to_use = branch_name
        if branch_name_to_use is None:
            branch_name_to_use = current_app.config["GIT_BRANCH"]
        repo_path_to_use = repo_path
        if repo_path is None:
            repo_path_to_use = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
        if repo_path_to_use is None:
            raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")

        git_username = ""
        git_email = ""
@@ -58,13 +93,121 @@ class GitService:
        shell_command_path = os.path.join(
            current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo"
        )
        shell_command = f"{shell_command_path} '{repo_path_to_use}' '{message}' '{git_username}' '{git_email}'"
        output = os.popen(shell_command).read()  # noqa: S605
        return output
        shell_command = [
            shell_command_path,
            repo_path_to_use,
            message,
            branch_name_to_use,
            git_username,
            git_email,
            current_app.config["GIT_USER_PASSWORD"],
        ]
        return cls.run_shell_command_to_get_stdout(shell_command)

    @classmethod
    def check_for_basic_configs(cls) -> None:
        """Check_for_basic_configs."""
        if current_app.config["GIT_BRANCH"] is None:
            raise MissingGitConfigsError(
                "Missing config for GIT_BRANCH. "
                "This is required for publishing process models"
            )

    @classmethod
    def check_for_publish_configs(cls) -> None:
        """Check_for_configs."""
        cls.check_for_basic_configs()
        if current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] is None:
            raise MissingGitConfigsError(
                "Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
                "This is required for publishing process models"
            )
        if current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"] is None:
            raise MissingGitConfigsError(
                "Missing config for GIT_CLONE_URL_FOR_PUBLISHING. "
                "This is required for publishing process models"
            )

    @classmethod
    def run_shell_command_as_boolean(cls, command: list[str]) -> bool:
        """Run_shell_command_as_boolean."""
        # we know result will be a bool here
        result: bool = cls.run_shell_command(command, return_success_state=True)  # type: ignore
        return result

    @classmethod
    def run_shell_command_to_get_stdout(cls, command: list[str]) -> str:
        """Run_shell_command_to_get_stdout."""
        # we know result will be a CompletedProcess here
        result: subprocess.CompletedProcess[bytes] = cls.run_shell_command(
            command, return_success_state=False
        )  # type: ignore
        return result.stdout.decode("utf-8").strip()

    @classmethod
    def run_shell_command(
        cls, command: list[str], return_success_state: bool = False
    ) -> Union[subprocess.CompletedProcess[bytes], bool]:
        """Run_shell_command."""
        # this is fine since we pass the commands directly
        result = subprocess.run(command, check=False, capture_output=True)  # noqa
        if return_success_state:
            return result.returncode == 0

        if result.returncode != 0:
            stdout = result.stdout.decode("utf-8")
            stderr = result.stderr.decode("utf-8")
            raise GitCommandError(
                f"Failed to execute git command: {command} "
                f"Stdout: {stdout} "
                f"Stderr: {stderr} "
            )

        return result
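The move from os.popen string commands to subprocess.run with argument lists avoids shell interpolation of user-supplied values and captures stderr for error reporting. A standalone sketch of the same pattern:

import subprocess

def run_to_stdout(command: list[str]) -> str:
    # Passing a list (no shell=True) means arguments are never shell-interpreted.
    result = subprocess.run(command, check=False, capture_output=True)
    if result.returncode != 0:
        raise RuntimeError(result.stderr.decode("utf-8"))
    return result.stdout.decode("utf-8").strip()

print(run_to_stdout(["git", "rev-parse", "--short", "HEAD"]))  # e.g. 'abacdf9'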
    # only supports github right now
    @classmethod
    def handle_web_hook(cls, webhook: dict) -> bool:
        """Handle_web_hook."""
        cls.check_for_publish_configs()

        if "repository" not in webhook or "clone_url" not in webhook["repository"]:
            raise InvalidGitWebhookBodyError(
                "Cannot find required keys of 'repository:clone_url' from webhook"
                f" body: {webhook}"
            )

        clone_url = webhook["repository"]["clone_url"]
        if clone_url != current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"]:
            raise GitCloneUrlMismatchError(
                "Configured clone url does not match clone url from webhook:"
                f" {clone_url}"
            )

        if "ref" not in webhook:
            raise InvalidGitWebhookBodyError(
                f"Could not find the 'ref' arg in the webhook boy: {webhook}"
            )

        if current_app.config["GIT_BRANCH"] is None:
            raise MissingGitConfigsError(
                "Missing config for GIT_BRANCH. This is required for updating the"
                " repository as a result of the webhook"
            )

        ref = webhook["ref"]
        git_branch = current_app.config["GIT_BRANCH"]
        if ref != f"refs/heads/{git_branch}":
            return False

        with FileSystemService.cd(current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]):
            cls.run_shell_command(["git", "pull"])
        return True

    @classmethod
    def publish(cls, process_model_id: str, branch_to_update: str) -> str:
        """Publish."""
        cls.check_for_publish_configs()
        source_process_model_root = FileSystemService.root_path()
        source_process_model_path = os.path.join(
            source_process_model_root, process_model_id
@@ -76,21 +219,29 @@ class GitService:
        # we are adding a guid to this so the flake8 issue has been mitigated
        destination_process_root = f"/tmp/{clone_dir}"  # noqa

        cmd = (
            f"git clone https://{current_app.config['GIT_USERNAME']}:{current_app.config['GIT_USER_PASSWORD']}"
            f"@github.com/sartography/sample-process-models.git {destination_process_root}"
        git_clone_url = current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"].replace(
            "https://",
            f"https://{current_app.config['GIT_USERNAME']}:{current_app.config['GIT_USER_PASSWORD']}@",
        )
        os.system(cmd)  # noqa: S605
        cmd = ["git", "clone", git_clone_url, destination_process_root]

        cls.run_shell_command(cmd)
        with FileSystemService.cd(destination_process_root):
            # create publish branch from branch_to_update
            os.system(f"git checkout {branch_to_update}")  # noqa: S605
            publish_branch = f"publish-{process_model_id}"
            command = f"git show-ref --verify refs/remotes/origin/{publish_branch}"
            output = os.popen(command).read()  # noqa: S605
            if output:
                os.system(f"git checkout {publish_branch}")  # noqa: S605
            cls.run_shell_command(["git", "checkout", branch_to_update])
            branch_to_pull_request = f"publish-{process_model_id}"

            # check if branch exists and checkout appropriately
            command = [
                "git",
                "show-ref",
                "--verify",
                f"refs/remotes/origin/{branch_to_pull_request}",
            ]
            if cls.run_shell_command_as_boolean(command):
                cls.run_shell_command(["git", "checkout", branch_to_pull_request])
            else:
                os.system(f"git checkout -b {publish_branch}")  # noqa: S605
                cls.run_shell_command(["git", "checkout", "-b", branch_to_pull_request])

            # copy files from process model into the new publish branch
            destination_process_model_path = os.path.join(
@@ -100,15 +251,19 @@ class GitService:
                shutil.rmtree(destination_process_model_path)
            shutil.copytree(source_process_model_path, destination_process_model_path)

            # add and commit files to publish_branch, then push
            commit_message = f"Request to publish changes to {process_model_id}, from {g.user.username}"
            cls.commit(commit_message, destination_process_root)
            os.system("git push")  # noqa
            # add and commit files to branch_to_pull_request, then push
            commit_message = (
                f"Request to publish changes to {process_model_id}, "
                f"from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}"
            )
            cls.commit(commit_message, destination_process_root, branch_to_pull_request)

            # build url for github page to open PR
            output = os.popen("git config --get remote.origin.url").read()  # noqa
            remote_url = output.strip().replace(".git", "")
            pr_url = f"{remote_url}/compare/{publish_branch}?expand=1"
            git_remote = cls.run_shell_command_to_get_stdout(
                ["git", "config", "--get", "remote.origin.url"]
            )
            remote_url = git_remote.strip().replace(".git", "")
            pr_url = f"{remote_url}/compare/{branch_to_update}...{branch_to_pull_request}?expand=1"

        # try to clean up
        if os.path.exists(destination_process_root):
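For reference, the new compare URL names both endpoints of the range, so GitHub opens a pull request from the publish branch into the branch being updated. A hypothetical expansion with made-up values:

remote_url = "https://github.com/sartography/sample-process-models"
branch_to_update = "main"
branch_to_pull_request = "publish-my-group:my-model"
print(f"{remote_url}/compare/{branch_to_update}...{branch_to_pull_request}?expand=1")
# https://github.com/sartography/sample-process-models/compare/main...publish-my-group:my-model?expand=1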
@@ -4,6 +4,7 @@ from typing import Optional
from flask_bpmn.models.db import db

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService


@@ -22,3 +23,15 @@ class GroupService:
        db.session.commit()
        UserService.create_principal(group.id, id_column_name="group_id")
        return group

    @classmethod
    def add_user_to_group_or_add_to_waiting(
        cls, username: str, group_identifier: str
    ) -> None:
        """Add_user_to_group_or_add_to_waiting."""
        group = cls.find_or_create_group(group_identifier)
        user = UserModel.query.filter_by(username=username).first()
        if user:
            UserService.add_user_to_group(user, group)
        else:
            UserService.add_waiting_group_assignment(username, group)
@@ -122,7 +122,8 @@ def setup_logger(app: Flask) -> None:

    if upper_log_level_string not in log_levels:
        raise InvalidLogLevelError(
            f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}"
            f"Log level given is invalid: '{upper_log_level_string}'. Valid options are"
            f" {log_levels}"
        )

    log_level = getattr(logging, upper_log_level_string)

@@ -176,7 +177,8 @@ def setup_logger(app: Flask) -> None:
    spiff_logger = logging.getLogger("spiff")
    spiff_logger.setLevel(spiff_log_level)
    spiff_formatter = logging.Formatter(
        "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s | %(process_instance_id)s"
        "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s |"
        " %(process)s | %(processName)s | %(process_instance_id)s"
    )

    # if you add a handler to spiff, it will be used/inherited by spiff.metrics
@@ -145,8 +145,11 @@ class MessageService:
            if process_instance_receive is None:
                raise MessageServiceError(
                    (
                        f"Process instance cannot be found for queued message: {message_instance_receive.id}."
                        f"Tried with id {message_instance_receive.process_instance_id}",
                    (
                        "Process instance cannot be found for queued message:"
                        f" {message_instance_receive.id}.Tried with id"
                        f" {message_instance_receive.process_instance_id}"
                    ),
                    )
                )

@@ -182,7 +185,6 @@ class MessageService:
        )

        for message_instance_receive in message_instances_receive:

            # sqlalchemy supports select / where statements like active record apparantly
            # https://docs.sqlalchemy.org/en/14/core/tutorial.html#conjunctions
            message_correlation_select = (
@@ -17,6 +17,7 @@ from typing import Optional
from typing import Tuple
from typing import TypedDict
from typing import Union
from uuid import UUID

import dateparser
import pytz

@@ -43,6 +44,9 @@ from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    CallActivityTaskConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    EventBasedGatewayConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    IntermediateCatchEventConverter,
)

@@ -65,11 +69,11 @@ from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.file import File
from spiffworkflow_backend.models.file import FileType
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_correlation_message_instance import (
    MessageCorrelationMessageInstanceModel,
@@ -151,6 +155,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
            "time": time,
            "decimal": decimal,
            "_strptime": _strptime,
            "enumerate": enumerate,
            "list": list,
            "map": map,
        }

        # This will overwrite the standard builtins

@@ -209,14 +216,14 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
        except Exception as exception:
            if task is None:
                raise ProcessInstanceProcessorError(
                    "Error evaluating expression: "
                    "'%s', exception: %s" % (expression, str(exception)),
                    "Error evaluating expression: '%s', exception: %s"
                    % (expression, str(exception)),
                ) from exception
            else:
                raise WorkflowTaskExecException(
                    task,
                    "Error evaluating expression "
                    "'%s', %s" % (expression, str(exception)),
                    "Error evaluating expression '%s', %s"
                    % (expression, str(exception)),
                ) from exception

    def execute(
@@ -263,6 +270,7 @@ class ProcessInstanceProcessor:
            EndEventConverter,
            IntermediateCatchEventConverter,
            IntermediateThrowEventConverter,
            EventBasedGatewayConverter,
            ManualTaskConverter,
            NoneTaskConverter,
            ReceiveTaskConverter,

@@ -276,6 +284,7 @@ class ProcessInstanceProcessor:
        ]
    )
    _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
    _event_serializer = EventBasedGatewayConverter()

    PROCESS_INSTANCE_ID_KEY = "process_instance_id"
    VALIDATION_PROCESS_KEY = "validate_only"

@@ -292,9 +301,7 @@ class ProcessInstanceProcessor:
        tld.spiff_step = process_instance_model.spiff_step

        # we want this to be the fully qualified path to the process model including all group subcomponents
        current_app.config[
            "THREAD_LOCAL_DATA"
        ].process_model_identifier = (
        current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
            f"{process_instance_model.process_model_identifier}"
        )

@@ -375,8 +382,10 @@ class ProcessInstanceProcessor:
        except MissingSpecError as ke:
            raise ApiError(
                error_code="unexpected_process_instance_structure",
                message="Failed to deserialize process_instance"
                " '%s' due to a mis-placed or missing task '%s'"
                message=(
                    "Failed to deserialize process_instance"
                    " '%s' due to a mis-placed or missing task '%s'"
                )
                % (self.process_model_identifier, str(ke)),
            ) from ke

@@ -392,7 +401,10 @@ class ProcessInstanceProcessor:
            raise (
                ApiError(
                    "process_model_not_found",
                    f"The given process model was not found: {process_model_identifier}.",
                    (
                        "The given process model was not found:"
                        f" {process_model_identifier}."
                    ),
                )
            )
        spec_files = SpecFileService.get_files(process_model_info)
@@ -522,8 +534,11 @@ class ProcessInstanceProcessor:
                    potential_owner_ids.append(lane_owner_user.id)
            self.raise_if_no_potential_owners(
                potential_owner_ids,
                f"No users found in task data lane owner list for lane: {task_lane}. "
                f"The user list used: {task.data['lane_owners'][task_lane]}",
                (
                    "No users found in task data lane owner list for lane:"
                    f" {task_lane}. The user list used:"
                    f" {task.data['lane_owners'][task_lane]}"
                ),
            )
        else:
            group_model = GroupModel.query.filter_by(identifier=task_lane).first()

@@ -551,14 +566,14 @@ class ProcessInstanceProcessor:
        """SaveSpiffStepDetails."""
        bpmn_json = self.serialize()
        wf_json = json.loads(bpmn_json)
        task_json = wf_json["tasks"]
        task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]}

        return {
            "process_instance_id": self.process_instance_model.id,
            "spiff_step": self.process_instance_model.spiff_step or 1,
            "task_json": task_json,
            "timestamp": round(time.time()),
            "completed_by_user_id": self.current_user().id,
            # "completed_by_user_id": self.current_user().id,
        }

    def spiff_step_details(self) -> SpiffStepDetailsModel:
@@ -569,17 +584,10 @@ class ProcessInstanceProcessor:
            spiff_step=details_mapping["spiff_step"],
            task_json=details_mapping["task_json"],
            timestamp=details_mapping["timestamp"],
            completed_by_user_id=details_mapping["completed_by_user_id"],
            # completed_by_user_id=details_mapping["completed_by_user_id"],
        )
        return details_model

    def save_spiff_step_details(self, active_task: ActiveTaskModel) -> None:
        """SaveSpiffStepDetails."""
        details_model = self.spiff_step_details()
        details_model.lane_assignment_id = active_task.lane_assignment_id
        db.session.add(details_model)
        db.session.commit()

    def extract_metadata(self, process_model_info: ProcessModelInfo) -> None:
        """Extract_metadata."""
        metadata_extraction_paths = process_model_info.metadata_extraction_paths

@@ -615,7 +623,7 @@ class ProcessInstanceProcessor:
            db.session.add(pim)
            db.session.commit()

    def save(self) -> None:
    def _save(self) -> None:
        """Saves the current state of this processor to the database."""
        self.process_instance_model.bpmn_json = self.serialize()

@@ -637,7 +645,10 @@ class ProcessInstanceProcessor:
        db.session.add(self.process_instance_model)
        db.session.commit()

        active_tasks = ActiveTaskModel.query.filter_by(
    def save(self) -> None:
        """Saves the current state and moves on to the next state."""
        self._save()
        human_tasks = HumanTaskModel.query.filter_by(
            process_instance_id=self.process_instance_model.id
        ).all()
        ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks()
@@ -668,14 +679,14 @@ class ProcessInstanceProcessor:
                if "formUiSchemaFilename" in properties:
                    ui_form_file_name = properties["formUiSchemaFilename"]

            active_task = None
            for at in active_tasks:
            human_task = None
            for at in human_tasks:
                if at.task_id == str(ready_or_waiting_task.id):
                    active_task = at
                    active_tasks.remove(at)
                    human_task = at
                    human_tasks.remove(at)

            if active_task is None:
                active_task = ActiveTaskModel(
            if human_task is None:
                human_task = HumanTaskModel(
                    process_instance_id=self.process_instance_model.id,
                    process_model_display_name=process_model_display_name,
                    form_file_name=form_file_name,

@@ -687,23 +698,65 @@ class ProcessInstanceProcessor:
                    task_status=ready_or_waiting_task.get_state_name(),
                    lane_assignment_id=potential_owner_hash["lane_assignment_id"],
                )
                db.session.add(active_task)
                db.session.add(human_task)
                db.session.commit()

                for potential_owner_id in potential_owner_hash[
                    "potential_owner_ids"
                ]:
                    active_task_user = ActiveTaskUserModel(
                        user_id=potential_owner_id, active_task_id=active_task.id
                    human_task_user = HumanTaskUserModel(
                        user_id=potential_owner_id, human_task_id=human_task.id
                    )
                    db.session.add(active_task_user)
                    db.session.add(human_task_user)
                db.session.commit()

        if len(active_tasks) > 0:
            for at in active_tasks:
                db.session.delete(at)
        if len(human_tasks) > 0:
            for at in human_tasks:
                at.completed = True
                db.session.add(at)
            db.session.commit()

    def serialize_task_spec(self, task_spec: SpiffTask) -> Any:
        """Get a serialized version of a task spec."""
        # The task spec is NOT actually a SpiffTask, it is the task spec attached to a SpiffTask
        # Not sure why mypy accepts this but whatever.
        return self._serializer.spec_converter.convert(task_spec)

    def send_bpmn_event(self, event_data: dict[str, Any]) -> None:
        """Send an event to the workflow."""
        payload = event_data.pop("payload", None)
        event_definition = self._event_serializer.restore(event_data)
        if payload is not None:
            event_definition.payload = payload
        current_app.logger.info(
            f"Event of type {event_definition.event_type} sent to process instance"
            f" {self.process_instance_model.id}"
        )
        self.bpmn_process_instance.catch(event_definition)
        self.do_engine_steps(save=True)

    def manual_complete_task(self, task_id: str, execute: bool) -> None:
        """Mark the task complete optionally executing it."""
        spiff_task = self.bpmn_process_instance.get_task(UUID(task_id))
        if execute:
            current_app.logger.info(
                f"Manually executing Task {spiff_task.task_spec.name} of process"
                f" instance {self.process_instance_model.id}"
            )
            spiff_task.complete()
        else:
            current_app.logger.info(
                f"Skipping Task {spiff_task.task_spec.name} of process instance"
                f" {self.process_instance_model.id}"
            )
            spiff_task._set_state(TaskState.COMPLETED)
            for child in spiff_task.children:
                child.task_spec._update(child)
        self.bpmn_process_instance.last_task = spiff_task
        self._save()
        # Saving the workflow seems to reset the status
        self.suspend()
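manual_complete_task distinguishes actually executing a task from merely force-marking it COMPLETED and cascading state to its children. A toy sketch of the two paths (the types and names here are made up for illustration, not the SpiffWorkflow API):

class ToyTask:
    def __init__(self, name: str) -> None:
        self.name = name
        self.completed = False

    def run(self) -> None:
        print(f"running {self.name}")
        self.completed = True

def manual_complete(task: ToyTask, execute: bool) -> None:
    # execute=True runs the task's work; execute=False just marks it done,
    # mirroring the "skip" branch above that force-sets COMPLETED state.
    if execute:
        task.run()
    else:
        task.completed = True

manual_complete(ToyTask("review"), execute=False)  # skipped, but marked complete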
    @staticmethod
    def get_parser() -> MyCustomParser:
        """Get_parser."""

@@ -738,14 +791,13 @@ class ProcessInstanceProcessor:
        """Bpmn_file_full_path_from_bpmn_process_identifier."""
        if bpmn_process_identifier is None:
            raise ValueError(
                "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
                "bpmn_file_full_path_from_bpmn_process_identifier:"
                " bpmn_process_identifier is unexpectedly None"
            )

        spec_reference = (
            SpecReferenceCache.query.filter_by(identifier=bpmn_process_identifier)
            .filter_by(type="process")
            .first()
        )
        spec_reference = SpecReferenceCache.query.filter_by(
            identifier=bpmn_process_identifier, type="process"
        ).first()
        bpmn_file_full_path = None
        if spec_reference is None:
            bpmn_file_full_path = (
@@ -762,7 +814,10 @@ class ProcessInstanceProcessor:
            raise (
                ApiError(
                    error_code="could_not_find_bpmn_process_identifier",
                    message="Could not find the the given bpmn process identifier from any sources: %s"
                    message=(
                        "Could not find the the given bpmn process identifier from any"
                        " sources: %s"
                    )
                    % bpmn_process_identifier,
                )
            )

@@ -786,7 +841,6 @@ class ProcessInstanceProcessor:

        new_bpmn_files = set()
        for bpmn_process_identifier in processor_dependencies_new:

            # ignore identifiers that spiff already knows about
            if bpmn_process_identifier in bpmn_process_identifiers_in_parser:
                continue
@@ -829,7 +883,10 @@ class ProcessInstanceProcessor:
            raise (
                ApiError(
                    error_code="no_primary_bpmn_error",
                    message="There is no primary BPMN process id defined for process_model %s"
                    message=(
                        "There is no primary BPMN process id defined for"
                        " process_model %s"
                    )
                    % process_model_info.id,
                )
            )
@@ -890,7 +947,10 @@ class ProcessInstanceProcessor:
        if not bpmn_message.correlations:
            raise ApiError(
                "message_correlations_missing",
                f"Could not find any message correlations bpmn_message: {bpmn_message.name}",
                (
                    "Could not find any message correlations bpmn_message:"
                    f" {bpmn_message.name}"
                ),
            )

        message_correlations = []

@@ -910,12 +970,16 @@ class ProcessInstanceProcessor:
            if message_correlation_property is None:
                raise ApiError(
                    "message_correlations_missing_from_process",
                    "Could not find a known message correlation with identifier:"
                    f"{message_correlation_property_identifier}",
                    (
                        "Could not find a known message correlation with"
                        f" identifier:{message_correlation_property_identifier}"
                    ),
                )
            message_correlations.append(
                {
                    "message_correlation_property": message_correlation_property,
                    "message_correlation_property": (
                        message_correlation_property
                    ),
                    "name": message_correlation_key,
                    "value": message_correlation_property_value,
                }
@@ -972,7 +1036,10 @@ class ProcessInstanceProcessor:
            if message_model is None:
                raise ApiError(
                    "invalid_message_name",
                    f"Invalid message name: {waiting_task.task_spec.event_definition.name}.",
                    (
                        "Invalid message name:"
                        f" {waiting_task.task_spec.event_definition.name}."
                    ),
                )

            # Ensure we are only creating one message instance for each waiting message

@@ -1179,11 +1246,20 @@ class ProcessInstanceProcessor:
        )
        return user_tasks  # type: ignore

    def complete_task(self, task: SpiffTask, active_task: ActiveTaskModel) -> None:
    def complete_task(
        self, task: SpiffTask, human_task: HumanTaskModel, user: UserModel
    ) -> None:
        """Complete_task."""
        self.increment_spiff_step()
        self.bpmn_process_instance.complete_task_from_id(task.id)
        self.save_spiff_step_details(active_task)
        human_task.completed_by_user_id = user.id
        human_task.completed = True
        db.session.add(human_task)
        details_model = self.spiff_step_details()
        db.session.add(details_model)

        # this is the thing that actually commits the db transaction (on behalf of the other updates above as well)
        self.save()

    def get_data(self) -> dict[str, Any]:
        """Get_data."""
@@ -1,14 +1,31 @@
"""Process_instance_report_service."""
import re
from dataclasses import dataclass
from typing import Any
from typing import Optional

import sqlalchemy
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from sqlalchemy import and_
from sqlalchemy import func
from sqlalchemy import or_
from sqlalchemy.orm import aliased
from sqlalchemy.orm import selectinload

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_metadata import (
    ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService


@dataclass
@ -16,14 +33,17 @@ class ProcessInstanceReportFilter:
|
|||
"""ProcessInstanceReportFilter."""
|
||||
|
||||
process_model_identifier: Optional[str] = None
|
||||
user_group_identifier: Optional[str] = None
|
||||
start_from: Optional[int] = None
|
||||
start_to: Optional[int] = None
|
||||
end_from: Optional[int] = None
|
||||
end_to: Optional[int] = None
|
||||
process_status: Optional[list[str]] = None
|
||||
initiated_by_me: Optional[bool] = None
|
||||
has_terminal_status: Optional[bool] = None
|
||||
with_tasks_completed_by_me: Optional[bool] = None
|
||||
with_tasks_completed_by_my_group: Optional[bool] = None
|
||||
with_tasks_assigned_to_my_group: Optional[bool] = None
|
||||
with_relation_to_me: Optional[bool] = None
|
||||
|
||||
def to_dict(self) -> dict[str, str]:
|
||||
"""To_dict."""
|
||||
|
@ -31,6 +51,8 @@ class ProcessInstanceReportFilter:
|
|||
|
||||
if self.process_model_identifier is not None:
|
||||
d["process_model_identifier"] = self.process_model_identifier
|
||||
if self.user_group_identifier is not None:
|
||||
d["user_group_identifier"] = self.user_group_identifier
|
||||
if self.start_from is not None:
|
||||
d["start_from"] = str(self.start_from)
|
||||
if self.start_to is not None:
|
||||
|
@@ -43,14 +65,18 @@ class ProcessInstanceReportFilter:
            d["process_status"] = ",".join(self.process_status)
        if self.initiated_by_me is not None:
            d["initiated_by_me"] = str(self.initiated_by_me).lower()
        if self.has_terminal_status is not None:
            d["has_terminal_status"] = str(self.has_terminal_status).lower()
        if self.with_tasks_completed_by_me is not None:
            d["with_tasks_completed_by_me"] = str(
                self.with_tasks_completed_by_me
            ).lower()
        if self.with_tasks_completed_by_my_group is not None:
            d["with_tasks_completed_by_my_group"] = str(
                self.with_tasks_completed_by_my_group
            ).lower()
        if self.with_tasks_assigned_to_my_group is not None:
            d["with_tasks_assigned_to_my_group"] = str(
                self.with_tasks_assigned_to_my_group
            ).lower()
        if self.with_relation_to_me is not None:
            d["with_relation_to_me"] = str(self.with_relation_to_me).lower()

        return d

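The to_dict() encoding above is what lets a filter travel as query-string values: booleans become "true"/"false" and lists are comma-joined. A round-trip sketch (helper names are illustrative, not part of the commit):

    def encode(value):
        if isinstance(value, bool):
            return str(value).lower()   # True -> "true"
        if isinstance(value, list):
            return ",".join(value)      # ["a", "b"] -> "a,b"
        return str(value)

    def decode_bool(raw):
        return raw == "true"

    assert decode_bool(encode(True)) is True
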
@@ -58,6 +84,55 @@ class ProcessInstanceReportFilter:
class ProcessInstanceReportService:
    """ProcessInstanceReportService."""

    @classmethod
    def system_metadata_map(cls, metadata_key: str) -> dict[str, Any]:
        """System_metadata_map."""
        # TODO replace with system reports that are loaded on launch (or similar)
        temp_system_metadata_map = {
            "default": {
                "columns": cls.builtin_column_options(),
                "filter_by": [],
                "order_by": ["-start_in_seconds", "-id"],
            },
            "system_report_completed_instances_initiated_by_me": {
                "columns": [
                    {"Header": "id", "accessor": "id"},
                    {
                        "Header": "process_model_display_name",
                        "accessor": "process_model_display_name",
                    },
                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
                    {"Header": "status", "accessor": "status"},
                ],
                "filter_by": [
                    {"field_name": "initiated_by_me", "field_value": True},
                    {"field_name": "has_terminal_status", "field_value": True},
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },
            "system_report_completed_instances_with_tasks_completed_by_me": {
                "columns": cls.builtin_column_options(),
                "filter_by": [
                    {"field_name": "with_tasks_completed_by_me", "field_value": True},
                    {"field_name": "has_terminal_status", "field_value": True},
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },
            "system_report_completed_instances_with_tasks_completed_by_my_groups": {
                "columns": cls.builtin_column_options(),
                "filter_by": [
                    {
                        "field_name": "with_tasks_assigned_to_my_group",
                        "field_value": True,
                    },
                    {"field_name": "has_terminal_status", "field_value": True},
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },
        }
        return temp_system_metadata_map[metadata_key]

    @classmethod
    def report_with_identifier(
        cls,
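Usage sketch for the new classmethod (assumes the service class above is importable; an unknown key raises KeyError, so callers must pass one of the identifiers defined in the map):

    metadata = ProcessInstanceReportService.system_metadata_map("default")
    assert metadata["order_by"] == ["-start_in_seconds", "-id"]
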
@@ -82,50 +157,10 @@ class ProcessInstanceReportService:
        if process_instance_report is not None:
            return process_instance_report  # type: ignore

        # TODO replace with system reports that are loaded on launch (or similar)
        temp_system_metadata_map = {
            "default": {
                "columns": cls.builtin_column_options(),
                "filter_by": [],
                "order_by": ["-start_in_seconds", "-id"],
            },
            "system_report_instances_initiated_by_me": {
                "columns": [
                    {"Header": "id", "accessor": "id"},
                    {
                        "Header": "process_model_display_name",
                        "accessor": "process_model_display_name",
                    },
                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
                    {"Header": "status", "accessor": "status"},
                ],
                "filter_by": [{"field_name": "initiated_by_me", "field_value": True}],
                "order_by": ["-start_in_seconds", "-id"],
            },
            "system_report_instances_with_tasks_completed_by_me": {
                "columns": cls.builtin_column_options(),
                "filter_by": [
                    {"field_name": "with_tasks_completed_by_me", "field_value": True}
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },
            "system_report_instances_with_tasks_completed_by_my_groups": {
                "columns": cls.builtin_column_options(),
                "filter_by": [
                    {
                        "field_name": "with_tasks_completed_by_my_group",
                        "field_value": True,
                    }
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },
        }

        process_instance_report = ProcessInstanceReportModel(
            identifier=report_identifier,
            created_by_id=user.id,
            report_metadata=temp_system_metadata_map[report_identifier],
            report_metadata=cls.system_metadata_map(report_identifier),
        )

        return process_instance_report  # type: ignore
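The hunk above is a get-or-create: return the stored report if one matches the identifier, otherwise build one from the system metadata. A condensed sketch of that flow (the query shape is an assumption for illustration; the lookup itself is outside this hunk):

    report = ProcessInstanceReportModel.query.filter_by(
        identifier=report_identifier, created_by_id=user.id
    ).first()
    if report is None:
        report = ProcessInstanceReportModel(
            identifier=report_identifier,
            created_by_id=user.id,
            report_metadata=ProcessInstanceReportService.system_metadata_map(
                report_identifier
            ),
        )
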
@@ -164,27 +199,31 @@ class ProcessInstanceReportService:
            return filters[key].split(",") if key in filters else None

        process_model_identifier = filters.get("process_model_identifier")
        user_group_identifier = filters.get("user_group_identifier")
        start_from = int_value("start_from")
        start_to = int_value("start_to")
        end_from = int_value("end_from")
        end_to = int_value("end_to")
        process_status = list_value("process_status")
        initiated_by_me = bool_value("initiated_by_me")
        has_terminal_status = bool_value("has_terminal_status")
        with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me")
        with_tasks_completed_by_my_group = bool_value(
            "with_tasks_completed_by_my_group"
        )
        with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group")
        with_relation_to_me = bool_value("with_relation_to_me")

        report_filter = ProcessInstanceReportFilter(
            process_model_identifier,
            user_group_identifier,
            start_from,
            start_to,
            end_from,
            end_to,
            process_status,
            initiated_by_me,
            has_terminal_status,
            with_tasks_completed_by_me,
            with_tasks_completed_by_my_group,
            with_tasks_assigned_to_my_group,
            with_relation_to_me,
        )

        return report_filter
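Only list_value's body is visible in the hunk; int_value and bool_value presumably mirror it. A self-contained sketch of all three coercions (the int_value and bool_value bodies are assumptions that match the to_dict() encoding above):

    filters = {"start_from": "1668000000", "initiated_by_me": "true", "process_status": "complete,error"}

    def int_value(key: str):
        return int(filters[key]) if key in filters else None

    def bool_value(key: str):
        return filters[key] == "true" if key in filters else None

    def list_value(key: str):
        return filters[key].split(",") if key in filters else None

    assert int_value("start_from") == 1668000000
    assert bool_value("initiated_by_me") is True
    assert list_value("process_status") == ["complete", "error"]
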
@@ -194,20 +233,25 @@ class ProcessInstanceReportService:
        cls,
        process_instance_report: ProcessInstanceReportModel,
        process_model_identifier: Optional[str] = None,
        user_group_identifier: Optional[str] = None,
        start_from: Optional[int] = None,
        start_to: Optional[int] = None,
        end_from: Optional[int] = None,
        end_to: Optional[int] = None,
        process_status: Optional[str] = None,
        initiated_by_me: Optional[bool] = None,
        has_terminal_status: Optional[bool] = None,
        with_tasks_completed_by_me: Optional[bool] = None,
        with_tasks_completed_by_my_group: Optional[bool] = None,
        with_tasks_assigned_to_my_group: Optional[bool] = None,
        with_relation_to_me: Optional[bool] = None,
    ) -> ProcessInstanceReportFilter:
        """Filter_from_metadata_with_overrides."""
        report_filter = cls.filter_from_metadata(process_instance_report)

        if process_model_identifier is not None:
            report_filter.process_model_identifier = process_model_identifier
        if user_group_identifier is not None:
            report_filter.user_group_identifier = user_group_identifier
        if start_from is not None:
            report_filter.start_from = start_from
        if start_to is not None:
@@ -220,12 +264,16 @@ class ProcessInstanceReportService:
            report_filter.process_status = process_status.split(",")
        if initiated_by_me is not None:
            report_filter.initiated_by_me = initiated_by_me
        if has_terminal_status is not None:
            report_filter.has_terminal_status = has_terminal_status
        if with_tasks_completed_by_me is not None:
            report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me
        if with_tasks_completed_by_my_group is not None:
            report_filter.with_tasks_completed_by_my_group = (
                with_tasks_completed_by_my_group
            )
        if with_tasks_assigned_to_my_group is not None:
            report_filter.with_tasks_assigned_to_my_group = (
                with_tasks_assigned_to_my_group
            )
        if with_relation_to_me is not None:
            report_filter.with_relation_to_me = with_relation_to_me

        return report_filter

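Usage sketch for the override pattern: metadata supplies the defaults and explicit keyword arguments win (the report variable is assumed to be loaded earlier):

    report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
        process_instance_report=report,
        start_from=1668000000,  # narrows whatever the metadata defined
    )
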
@@ -241,9 +289,9 @@ class ProcessInstanceReportService:
            process_instance_dict = process_instance["ProcessInstanceModel"].serialized
            for metadata_column in metadata_columns:
                if metadata_column["accessor"] not in process_instance_dict:
                    process_instance_dict[
                        metadata_column["accessor"]
                    ] = process_instance[metadata_column["accessor"]]
                    process_instance_dict[metadata_column["accessor"]] = (
                        process_instance[metadata_column["accessor"]]
                    )

            results.append(process_instance_dict)
        return results
@@ -268,3 +316,207 @@ class ProcessInstanceReportService:
            {"Header": "Username", "accessor": "username", "filterable": False},
            {"Header": "Status", "accessor": "status", "filterable": False},
        ]

    @classmethod
    def run_process_instance_report(
        cls,
        report_filter: ProcessInstanceReportFilter,
        process_instance_report: ProcessInstanceReportModel,
        user: UserModel,
        page: int = 1,
        per_page: int = 100,
    ) -> dict:
        """Run_process_instance_report."""
        process_instance_query = ProcessInstanceModel.query
        # Always join that hot user table for good performance at serialization time.
        process_instance_query = process_instance_query.options(
            selectinload(ProcessInstanceModel.process_initiator)
        )

        if report_filter.process_model_identifier is not None:
            process_model = ProcessModelService.get_process_model(
                f"{report_filter.process_model_identifier}",
            )

            process_instance_query = process_instance_query.filter_by(
                process_model_identifier=process_model.id
            )

        # this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
        if (
            ProcessInstanceModel.start_in_seconds is None
            or ProcessInstanceModel.end_in_seconds is None
        ):
            raise (
                ApiError(
                    error_code="unexpected_condition",
                    message="Something went very wrong",
                    status_code=500,
                )
            )

        if report_filter.start_from is not None:
            process_instance_query = process_instance_query.filter(
                ProcessInstanceModel.start_in_seconds >= report_filter.start_from
            )
        if report_filter.start_to is not None:
            process_instance_query = process_instance_query.filter(
                ProcessInstanceModel.start_in_seconds <= report_filter.start_to
            )
        if report_filter.end_from is not None:
            process_instance_query = process_instance_query.filter(
                ProcessInstanceModel.end_in_seconds >= report_filter.end_from
            )
        if report_filter.end_to is not None:
            process_instance_query = process_instance_query.filter(
                ProcessInstanceModel.end_in_seconds <= report_filter.end_to
            )
        if report_filter.process_status is not None:
            process_instance_query = process_instance_query.filter(
                ProcessInstanceModel.status.in_(report_filter.process_status)  # type: ignore
            )

        if report_filter.initiated_by_me is True:
            process_instance_query = process_instance_query.filter_by(
                process_initiator=user
            )

        if report_filter.has_terminal_status is True:
            process_instance_query = process_instance_query.filter(
                ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses())  # type: ignore
            )

        if (
            not report_filter.with_tasks_completed_by_me
            and not report_filter.with_tasks_assigned_to_my_group
            and report_filter.with_relation_to_me is True
        ):
            process_instance_query = process_instance_query.outerjoin(
                HumanTaskModel
            ).outerjoin(
                HumanTaskUserModel,
                and_(
                    HumanTaskModel.id == HumanTaskUserModel.human_task_id,
                    HumanTaskUserModel.user_id == user.id,
                ),
            )
            process_instance_query = process_instance_query.filter(
                or_(
                    HumanTaskUserModel.id.is_not(None),
                    ProcessInstanceModel.process_initiator_id == user.id,
                )
            )

        if report_filter.with_tasks_completed_by_me is True:
            process_instance_query = process_instance_query.filter(
                ProcessInstanceModel.process_initiator_id != user.id
            )
            process_instance_query = process_instance_query.join(
                HumanTaskModel,
                and_(
                    HumanTaskModel.process_instance_id == ProcessInstanceModel.id,
                    HumanTaskModel.completed_by_user_id == user.id,
                ),
            )

        if report_filter.with_tasks_assigned_to_my_group is True:
            group_model_join_conditions = [
                GroupModel.id == HumanTaskModel.lane_assignment_id
            ]
            if report_filter.user_group_identifier:
                group_model_join_conditions.append(
                    GroupModel.identifier == report_filter.user_group_identifier
                )
            process_instance_query = process_instance_query.join(HumanTaskModel)
            process_instance_query = process_instance_query.join(
                GroupModel, and_(*group_model_join_conditions)
            )
            process_instance_query = process_instance_query.join(
                UserGroupAssignmentModel,
                UserGroupAssignmentModel.group_id == GroupModel.id,
            )
            process_instance_query = process_instance_query.filter(
                UserGroupAssignmentModel.user_id == user.id
            )

        instance_metadata_aliases = {}
        stock_columns = ProcessInstanceReportService.get_column_names_for_model(
            ProcessInstanceModel
        )
        for column in process_instance_report.report_metadata["columns"]:
            if column["accessor"] in stock_columns:
                continue
            instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
            instance_metadata_aliases[column["accessor"]] = instance_metadata_alias

            filter_for_column = None
            if "filter_by" in process_instance_report.report_metadata:
                filter_for_column = next(
                    (
                        f
                        for f in process_instance_report.report_metadata["filter_by"]
                        if f["field_name"] == column["accessor"]
                    ),
                    None,
                )
            isouter = True
            conditions = [
                ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
                instance_metadata_alias.key == column["accessor"],
            ]
            if filter_for_column:
                isouter = False
                conditions.append(
                    instance_metadata_alias.value == filter_for_column["field_value"]
                )
            process_instance_query = process_instance_query.join(
                instance_metadata_alias, and_(*conditions), isouter=isouter
            ).add_columns(
                func.max(instance_metadata_alias.value).label(column["accessor"])
            )

        order_by_query_array = []
        order_by_array = process_instance_report.report_metadata["order_by"]
        if len(order_by_array) < 1:
            order_by_array = ProcessInstanceReportModel.default_order_by()
        for order_by_option in order_by_array:
            attribute = re.sub("^-", "", order_by_option)
            if attribute in stock_columns:
                if order_by_option.startswith("-"):
                    order_by_query_array.append(
                        getattr(ProcessInstanceModel, attribute).desc()
                    )
                else:
                    order_by_query_array.append(
                        getattr(ProcessInstanceModel, attribute).asc()
                    )
            elif attribute in instance_metadata_aliases:
                if order_by_option.startswith("-"):
                    order_by_query_array.append(
                        func.max(instance_metadata_aliases[attribute].value).desc()
                    )
                else:
                    order_by_query_array.append(
                        func.max(instance_metadata_aliases[attribute].value).asc()
                    )
        # return process_instance_query
        process_instances = (
            process_instance_query.group_by(ProcessInstanceModel.id)
            .add_columns(ProcessInstanceModel.id)
            .order_by(*order_by_query_array)
            .paginate(page=page, per_page=per_page, error_out=False)
        )
        results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(
            process_instances.items, process_instance_report.report_metadata["columns"]
        )
        response_json = {
            "report": process_instance_report,
            "results": results,
            "filters": report_filter.to_dict(),
            "pagination": {
                "count": len(results),
                "total": process_instances.total,
                "pages": process_instances.pages,
            },
        }
        return response_json

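The metadata-column handling above follows one pattern worth calling out: each non-stock column gets its own alias of ProcessInstanceMetadataModel, joined outer by default but inner when the report filters on that column, and selected via func.max() so it survives the final group_by. A simplified sketch with toy model names (not the real spiffworkflow_backend schema):

    from sqlalchemy import and_, func
    from sqlalchemy.orm import aliased

    def join_metadata_column(query, Instance, Metadata, accessor, required_value=None):
        alias = aliased(Metadata)  # fresh alias per metadata key
        conditions = [
            Instance.id == alias.process_instance_id,
            alias.key == accessor,
        ]
        isouter = required_value is None  # filtering forces an inner join
        if required_value is not None:
            conditions.append(alias.value == required_value)
        return query.join(alias, and_(*conditions), isouter=isouter).add_columns(
            func.max(alias.value).label(accessor)  # collapses under group_by
        )
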
@@ -8,7 +8,7 @@ from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceApi
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus

@@ -17,6 +17,7 @@ from spiffworkflow_backend.models.task import MultiInstanceType
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
@@ -36,7 +37,10 @@ class ProcessInstanceService:
        user: UserModel,
    ) -> ProcessInstanceModel:
        """Get_process_instance_from_spec."""
        current_git_revision = GitService.get_current_revision()
        try:
            current_git_revision = GitService.get_current_revision()
        except GitCommandError:
            current_git_revision = ""
        process_instance_model = ProcessInstanceModel(
            status=ProcessInstanceStatus.not_started.value,
            process_initiator=user,
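The try/except above lets instances start even when the backend is not running inside a git checkout. A self-contained analogue using plain subprocess (an illustration only; the real code goes through GitService):

    import subprocess

    def current_revision_or_empty(repo_dir: str = ".") -> str:
        try:
            return subprocess.check_output(
                ["git", "rev-parse", "--short", "HEAD"], cwd=repo_dir, text=True
            ).strip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            return ""  # no git checkout or no git binary: degrade gracefully
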
@@ -81,7 +85,8 @@ class ProcessInstanceService:
                db.session.add(process_instance)
                db.session.commit()
                error_message = (
                    f"Error running waiting task for process_instance {process_instance.id}"
                    "Error running waiting task for process_instance"
                    f" {process_instance.id}"
                    + f"({process_instance.process_model_identifier}). {str(e)}"
                )
                current_app.logger.error(error_message)
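Most of the message changes in this commit are black --preview reflows that split long f-strings into implicitly concatenated pieces. One pitfall worth noting: adjacent string literals are joined with no separator, so each continuation must carry its own leading space, and in the hunk above the piece starting with "(" has none, so the rendered message reads "...process_instance 42(my_model)...". A minimal illustration:

    process_instance_id = 42
    message = (
        "Error running waiting task for process_instance"
        f" {process_instance_id}"
        + f"(model). details"
    )
    assert "42(model)" in message  # note the missing space before "("
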
@@ -121,7 +126,7 @@ class ProcessInstanceService:
        if next_task_trying_again is not None:
            process_instance_api.next_task = (
                ProcessInstanceService.spiff_task_to_api_task(
                    next_task_trying_again, add_docs_and_forms=True
                    processor, next_task_trying_again, add_docs_and_forms=True
                )
            )

@@ -174,7 +179,10 @@ class ProcessInstanceService:
        else:
            raise ApiError.from_task(
                error_code="task_lane_user_error",
                message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it."
                message=(
                    "Spiff Task %s lane user dict must have a key called"
                    " 'value' with the user's uid in it."
                )
                % spiff_task.task_spec.name,
                task=spiff_task,
            )
@@ -196,7 +204,7 @@ class ProcessInstanceService:
        spiff_task: SpiffTask,
        data: dict[str, Any],
        user: UserModel,
        active_task: ActiveTaskModel,
        human_task: HumanTaskModel,
    ) -> None:
        """All the things that need to happen when we complete a form.

@@ -210,7 +218,7 @@ class ProcessInstanceService:
        dot_dct = ProcessInstanceService.create_dot_dict(data)
        spiff_task.update_data(dot_dct)
        # ProcessInstanceService.post_process_form(spiff_task)  # some properties may update the data store.
        processor.complete_task(spiff_task, active_task)
        processor.complete_task(spiff_task, human_task, user=user)
        processor.do_engine_steps(save=True)

    @staticmethod
@@ -277,7 +285,9 @@ class ProcessInstanceService:

    @staticmethod
    def spiff_task_to_api_task(
        spiff_task: SpiffTask, add_docs_and_forms: bool = False
        processor: ProcessInstanceProcessor,
        spiff_task: SpiffTask,
        add_docs_and_forms: bool = False,
    ) -> Task:
        """Spiff_task_to_api_task."""
        task_type = spiff_task.task_spec.spec_type

@@ -302,10 +312,17 @@ class ProcessInstanceService:
        else:
            lane = None

        if hasattr(spiff_task.task_spec, "spec"):
            call_activity_process_identifier = spiff_task.task_spec.spec
        else:
            call_activity_process_identifier = None

        parent_id = None
        if spiff_task.parent:
            parent_id = spiff_task.parent.id

        serialized_task_spec = processor.serialize_task_spec(spiff_task.task_spec)

        task = Task(
            spiff_task.id,
            spiff_task.task_spec.name,

@@ -316,9 +333,11 @@ class ProcessInstanceService:
            multi_instance_type=mi_type,
            multi_instance_count=info["mi_count"],
            multi_instance_index=info["mi_index"],
            process_name=spiff_task.task_spec._wf_spec.description,
            process_identifier=spiff_task.task_spec._wf_spec.name,
            properties=props,
            parent=parent_id,
            event_definition=serialized_task_spec.get("event_definition"),
            call_activity_process_identifier=call_activity_process_identifier,
        )

        return task

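Because spiff_task_to_api_task now takes the processor first (so the serialized task spec can feed event_definition), every call site changes shape. A call-site sketch, assuming a processor and spiff_task already in hand:

    api_task = ProcessInstanceService.spiff_task_to_api_task(
        processor, spiff_task, add_docs_and_forms=True
    )
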
@@ -146,7 +146,10 @@ class ProcessModelService(FileSystemService):
        if len(instances) > 0:
            raise ApiError(
                error_code="existing_instances",
                message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
                message=(
                    f"We cannot delete the model `{process_model_id}`, there are"
                    " existing instances that depend on it."
                ),
            )
        process_model = self.get_process_model(process_model_id)
        path = self.workflow_path(process_model)
@@ -172,7 +175,6 @@ class ProcessModelService(FileSystemService):
        cls, relative_path: str
    ) -> ProcessModelInfo:
        """Get_process_model_from_relative_path."""
        process_group_identifier, _ = os.path.split(relative_path)
        path = os.path.join(FileSystemService.root_path(), relative_path)
        return cls.__scan_process_model(path)

@@ -224,11 +226,11 @@ class ProcessModelService(FileSystemService):
        user = UserService.current_user()
        new_process_model_list = []
        for process_model in process_models:
            uri = f"/v1.0/process-models/{process_model.id.replace('/', ':')}/process-instances"
            result = AuthorizationService.user_has_permission(
            uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}"
            has_permission = AuthorizationService.user_has_permission(
                user=user, permission="create", target_uri=uri
            )
            if result:
            if has_permission:
                new_process_model_list.append(process_model)
        return new_process_model_list

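The loop keeps only the models the user may start instances for, now checked against the /v1.0/process-instances/... target URI. The same logic as a comprehension (a restatement for clarity, not part of the commit):

    def filter_by_create_permission(process_models, user):
        return [
            pm
            for pm in process_models
            if AuthorizationService.user_has_permission(
                user=user,
                permission="create",
                target_uri=f"/v1.0/process-instances/{pm.id.replace('/', ':')}",
            )
        ]
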
@@ -340,8 +342,11 @@ class ProcessModelService(FileSystemService):
        if len(problem_models) > 0:
            raise ApiError(
                error_code="existing_instances",
                message=f"We cannot delete the group `{process_group_id}`, "
                f"there are models with existing instances inside the group. {problem_models}",
                message=(
                    f"We cannot delete the group `{process_group_id}`, there are"
                    " models with existing instances inside the group."
                    f" {problem_models}"
                ),
            )
        shutil.rmtree(path)
        self.cleanup_process_group_display_order()
@@ -393,7 +398,10 @@ class ProcessModelService(FileSystemService):
        if process_group is None:
            raise ApiError(
                error_code="process_group_could_not_be_loaded_from_disk",
                message=f"We could not load the process_group from disk from: {dir_path}",
                message=(
                    "We could not load the process_group from disk from:"
                    f" {dir_path}"
                ),
            )
        else:
            process_group_id = dir_path.replace(FileSystemService.root_path(), "")
@@ -430,6 +438,9 @@ class ProcessModelService(FileSystemService):
        # process_group.process_groups.sort()
        return process_group

    # path might have backslashes on windows, not sure
    # not sure if os.path.join converts forward slashes in the relative_path argument to backslashes:
    # path = os.path.join(FileSystemService.root_path(), relative_path)
    @classmethod
    def __scan_process_model(
        cls,
@@ -446,12 +457,19 @@ class ProcessModelService(FileSystemService):
            data.pop("process_group_id")
        # we don't save `id` in the json file, so we add it back in here.
        relative_path = os.path.relpath(path, FileSystemService.root_path())

        # even on windows, use forward slashes for ids
        relative_path = relative_path.replace("\\", "/")

        data["id"] = relative_path
        process_model_info = ProcessModelInfo(**data)
        if process_model_info is None:
            raise ApiError(
                error_code="process_model_could_not_be_loaded_from_disk",
                message=f"We could not load the process_model from disk with data: {data}",
                message=(
                    "We could not load the process_model from disk with data:"
                    f" {data}"
                ),
            )
        else:
            if name is None:

@@ -112,7 +112,10 @@ class ScriptUnitTestRunner:
        except json.decoder.JSONDecodeError as ex:
            return ScriptUnitTestResult(
                result=False,
                error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}",
                error=(
                    "Failed to parse expectedOutputJson:"
                    f" {unit_test['expectedOutputJson']}: {str(ex)}"
                ),
            )

        script = task.task_spec.script

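For reference, the failure path above fires when a test's expectedOutputJson is not valid JSON. A minimal reproduction (a plain tuple stands in for ScriptUnitTestResult, purely for illustration):

    import json

    unit_test = {"expectedOutputJson": "{not valid json"}
    try:
        json.loads(unit_test["expectedOutputJson"])
    except json.decoder.JSONDecodeError as ex:
        result = (False, f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {ex}")
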
@@ -44,8 +44,10 @@ class SecretService:
        except Exception as e:
            raise ApiError(
                error_code="create_secret_error",
                message=f"There was an error creating a secret with key: {key} and value ending with: {value[:-4]}. "
                f"Original error is {e}",
                message=(
                    f"There was an error creating a secret with key: {key} and value"
                    f" ending with: {value[:-4]}. Original error is {e}"
                ),
            ) from e
        return secret_model

@@ -89,7 +91,9 @@ class SecretService:
        else:
            raise ApiError(
                error_code="update_secret_error",
                message=f"Cannot update secret with key: {key}. Resource does not exist.",
                message=(
                    f"Cannot update secret with key: {key}. Resource does not exist."
                ),
                status_code=404,
            )

@@ -104,11 +108,16 @@ class SecretService:
            except Exception as e:
                raise ApiError(
                    error_code="delete_secret_error",
                    message=f"Could not delete secret with key: {key}. Original error is: {e}",
                    message=(
                        f"Could not delete secret with key: {key}. Original error"
                        f" is: {e}"
                    ),
                ) from e
        else:
            raise ApiError(
                error_code="delete_secret_error",
                message=f"Cannot delete secret with key: {key}. Resource does not exist.",
                message=(
                    f"Cannot delete secret with key: {key}. Resource does not exist."
                ),
                status_code=404,
            )

@@ -31,7 +31,6 @@ class ServiceTaskDelegate:
        if value.startswith(secret_prefix):
            key = value.removeprefix(secret_prefix)
            secret = SecretService().get_secret(key)
            assert secret  # noqa: S101
            return secret.value

        file_prefix = "file:"

Some files were not shown because too many files have changed in this diff.