diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7587ce0d1..3f1c6dbb8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,6 +11,12 @@ repos:
         require_serial: true
         # exclude: ^migrations/
         exclude: "/migrations/"
+
+        # otherwise it will not fix long lines if the long lines contain long strings
+        # https://github.com/psf/black/pull/1132
+        # https://github.com/psf/black/pull/1609
+        args: [--preview]
+
       - id: check-added-large-files
         files: ^spiffworkflow-backend/
         name: Check for added large files
diff --git a/docker-compose.yml b/docker-compose.yml
index 1cf550248..b505499b5 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -10,9 +10,9 @@ services:
     environment:
       - MYSQL_DATABASE=spiffworkflow_backend_development
      - MYSQL_ROOT_PASSWORD=my-secret-pw
-      - MYSQL_TCP_PORT=7003
+      - MYSQL_TCP_PORT=8003
     ports:
-      - "7003"
+      - "8003"
     healthcheck:
       test: mysql --user=root --password=my-secret-pw -e 'select 1' spiffworkflow_backend_development
       interval: 10s
@@ -30,12 +30,12 @@ services:
       - SPIFFWORKFLOW_BACKEND_ENV=development
       - FLASK_DEBUG=0
       - FLASK_SESSION_SECRET_KEY=super_secret_key
-      - OPEN_ID_SERVER_URL=http://localhost:7000/openid
-      - SPIFFWORKFLOW_FRONTEND_URL=http://localhost:7001
-      - SPIFFWORKFLOW_BACKEND_URL=http://localhost:7000
-      - SPIFFWORKFLOW_BACKEND_PORT=7000
+      - OPEN_ID_SERVER_URL=http://localhost:8000/openid
+      - SPIFFWORKFLOW_FRONTEND_URL=http://localhost:8001
+      - SPIFFWORKFLOW_BACKEND_URL=http://localhost:8000
+      - SPIFFWORKFLOW_BACKEND_PORT=8000
       - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
-      - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:7003/spiffworkflow_backend_development
+      - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:8003/spiffworkflow_backend_development
       - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
       - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=false
       - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=example.yml
@@ -43,12 +43,12 @@ services:
       - OPEN_ID_CLIENT_ID=spiffworkflow-backend
       - OPEN_ID_CLIENT_SECRET_KEY=my_open_id_secret_key
     ports:
-      - "7000:7000"
+      - "8000:8000"
     volumes:
       - ./process_models:/app/process_models
       - ./log:/app/log
     healthcheck:
-      test: curl localhost:7000/v1.0/status --fail
+      test: curl localhost:8000/v1.0/status --fail
       interval: 10s
       timeout: 5s
       retries: 20
@@ -58,9 +58,9 @@ services:
     image: ghcr.io/sartography/spiffworkflow-frontend
     environment:
       - APPLICATION_ROOT=/
-      - PORT0=7001
+      - PORT0=8001
     ports:
-      - "7001:7001"
+      - "8001:8001"
 
   spiffworkflow-connector:
     container_name: spiffworkflow-connector
@@ -69,10 +69,11 @@ services:
       - FLASK_ENV=${FLASK_ENV:-development}
       - FLASK_DEBUG=0
       - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
+      - CONNECTOR_PROXY_PORT=8004
     ports:
-      - "7004:7004"
+      - "8004:8004"
     healthcheck:
-      test: curl localhost:7004/liveness --fail
+      test: curl localhost:8004/liveness --fail
       interval: 10s
       timeout: 5s
       retries: 20
diff --git a/flask-bpmn/poetry.lock b/flask-bpmn/poetry.lock
index 8b17963a7..e1ce1b3ad 100644
--- a/flask-bpmn/poetry.lock
+++ b/flask-bpmn/poetry.lock
@@ -813,22 +813,6 @@ category = "main"
 optional = false
 python-versions = ">=3.6"
 
-[[package]]
-name = "libcst"
-version = "0.4.3"
-description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pyyaml = ">=5.2" -typing-extensions = ">=3.7.4.2" -typing-inspect = ">=0.4.0" - -[package.extras] -dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"] - [[package]] name = "livereload" version = "2.6.3" @@ -905,18 +889,6 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "monkeytype" -version = "22.2.0" -description = "Generating type annotations from sampled production types" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -libcst = ">=0.3.7" -mypy-extensions = "*" - [[package]] name = "mypy" version = "0.991" @@ -1504,7 +1476,7 @@ test = ["pytest"] [[package]] name = "SpiffWorkflow" version = "1.2.1" -description = "" +description = "A workflow framework and BPMN/DMN Processor" category = "main" optional = false python-versions = "*" @@ -1520,7 +1492,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "025bc30f27366e06dd1286b7563e4b1cb04c1c46" +resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994" [[package]] name = "sqlalchemy" @@ -1627,18 +1599,6 @@ category = "main" optional = false python-versions = ">=3.7" -[[package]] -name = "typing-inspect" -version = "0.7.1" -description = "Runtime inspection utilities for typing module." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - [[package]] name = "unidecode" version = "1.3.4" @@ -1770,7 +1730,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "6dfda037ebb3024834a45670108756a3057fff1b6fb5b916d222d3a162509b7d" +content-hash = "45cac5741fa47e44710f5aae6dfdb4636fc4d60df2d6aba467052fdd5199e791" [metadata.files] alabaster = [ @@ -2234,32 +2194,6 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] -libcst = [ - {file = "libcst-0.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bea98a8be2b1725784ae01e89519121eba7d81280dcbee40ae03ececd7277cf3"}, - {file = "libcst-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d9191c764645dddf94d49885e590433fa0ee6d347b07eec86566786e6d2ada5"}, - {file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f22e9787e44304e7cd9744e543602ab2c1bca8b922cb6237ea08d9a0be3fdd"}, - {file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff147dd77b6ea72e4f2f0abfcd1be11a3108c28cb65e6da666c0b77142033f7c"}, - {file = "libcst-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d744d4a6301c75322f1d88365dccfe402a51e724583a2edc4cba474462cc9419"}, - {file = "libcst-0.4.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:ed0f15545eddfdd6270069ce0b2d4c253298817bd676a1a6adddaa1d66c7e28b"}, - {file = "libcst-0.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6f57056a743853c01bbd21bfd96c2a1b4c317bbc66920f5f2c9999b3dca7233"}, - {file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3d33da8f9b088e118bfc6ecacdd627ac237baeb490f4d7a383af4df4ea4f82"}, - {file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df5f51a837fc10cdbf5c61acb467f6c15d5f9ca1d94a84a6a29c4f20ce7b437e"}, - {file = "libcst-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f744f60057c8998b856d9baf28765c65574992f4a49830ca350010fc31f4eac4"}, - {file = "libcst-0.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:88ab371aab82f7241448e263ec42abced649a77cdd21df960268e6df70b3f3f7"}, - {file = "libcst-0.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:826ea5f10a84625db861ccf35946317f4f29e575261e44c0cd6c24c4dde5c2bb"}, - {file = "libcst-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab5b23796ce66303398bb7b2d27bcb17d2416dacd3d00229c961aed87d79a3b"}, - {file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afc793c95af79e5adc5905713ccddff034d0de3e3da748424b722edf890227de"}, - {file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c982387b8e23ad18efbd0287004924931a0b05c91ed5630453faf224bb0b185"}, - {file = "libcst-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4c25aca45df5f86a6a1c8c219e8c7a90acdaef02b53eb01eafa563381cb0ce"}, - {file = "libcst-0.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1a395129ecf6c6ce429427f34100ccd99f35898a98187764a4559d9f92166cd0"}, - {file = "libcst-0.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ca00819affafccb02b2582ec47706712b995c9887cad02bb8efe94a066830f37"}, - {file = "libcst-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:231a9ca446570f9b63d8c2c6dbf6c796fb939a5e4ef9dc0dd9304a21a6c0da16"}, - {file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b08e7a56950479c856183ad6fdf0a21df028d6732e1d19822ec1593e32f700ca"}, - {file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cb70e7e5118234e75d309fcf04931e20f282f16c80dda464fc1b88ef02e52e4"}, - {file = "libcst-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c8c00b24ab39facff463b18b9abc8df7dd063ae0ce9fe2e78e199c9a8572e37"}, - {file = "libcst-0.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:28f35b9a21b2f8982a8ed3f53b1fdbc5435252409d34d061a3229dc4b413b8c7"}, - {file = "libcst-0.4.3.tar.gz", hash = "sha256:f79ab61287505d97ed57ead14b78777f48cd6ec5339ca4978987e4c35957a465"}, -] livereload = [ {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, ] @@ -2389,10 +2323,6 @@ mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] -monkeytype = [ - {file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"}, - {file = "MonkeyType-22.2.0.tar.gz", hash = 
"sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"}, -] mypy = [ {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, @@ -2808,11 +2738,6 @@ typing-extensions = [ {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] -typing-inspect = [ - {file = "typing_inspect-0.7.1-py2-none-any.whl", hash = "sha256:b1f56c0783ef0f25fb064a01be6e5407e54cf4a4bf4f3ba3fe51e0bd6dcea9e5"}, - {file = "typing_inspect-0.7.1-py3-none-any.whl", hash = "sha256:3cd7d4563e997719a710a3bfe7ffb544c6b72069b6812a02e9b414a8fa3aaa6b"}, - {file = "typing_inspect-0.7.1.tar.gz", hash = "sha256:047d4097d9b17f46531bf6f014356111a1b6fb821a24fe7ac909853ca2a782aa"}, -] unidecode = [ {file = "Unidecode-1.3.4-py3-none-any.whl", hash = "sha256:afa04efcdd818a93237574791be9b2817d7077c25a068b00f8cff7baa4e59257"}, {file = "Unidecode-1.3.4.tar.gz", hash = "sha256:8e4352fb93d5a735c788110d2e7ac8e8031eb06ccbfe8d324ab71735015f9342"}, diff --git a/flask-bpmn/src/flask_bpmn/api/api_error.py b/flask-bpmn/src/flask_bpmn/api/api_error.py index c782c2d38..ed792e7e8 100644 --- a/flask-bpmn/src/flask_bpmn/api/api_error.py +++ b/flask-bpmn/src/flask_bpmn/api/api_error.py @@ -175,6 +175,10 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: if not isinstance(exception, ApiError) or exception.error_code != "invalid_token": id = capture_exception(exception) + if isinstance(exception, ApiError): + current_app.logger.info( + f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}") + organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG") project_slug = current_app.config.get("SENTRY_PROJECT_SLUG") if organization_slug and project_slug: diff --git a/poetry.lock b/poetry.lock index e5c9c4c04..118134c7f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -163,7 +163,7 @@ python-versions = "*" [[package]] name = "black" -version = "22.10.0" +version = "23.1a1" description = "The uncompromising code formatter." 
category = "dev" optional = false @@ -614,7 +614,7 @@ werkzeug = "*" type = "git" url = "https://github.com/sartography/flask-bpmn" reference = "main" -resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4" +resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b" [[package]] name = "flask-cors" @@ -1760,7 +1760,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4" +resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c" [[package]] name = "sqlalchemy" @@ -2182,27 +2182,18 @@ billiard = [ {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, ] black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"}, + {file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"}, + {file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"}, + {file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"}, + {file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"}, + {file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"}, + {file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"}, + {file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"}, + {file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"}, + {file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"}, + {file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"}, + {file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"}, ] blinker = [ {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, @@ -2857,7 +2848,18 @@ psycopg2 = [ {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, ] pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = 
"sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] pycodestyle = [ diff --git a/spiffworkflow-backend/.gitignore b/spiffworkflow-backend/.gitignore index 58cb14347..a61561685 100644 --- a/spiffworkflow-backend/.gitignore +++ b/spiffworkflow-backend/.gitignore @@ -1,7 +1,7 @@ .mypy_cache/ /.idea/ /.coverage -/.coverage.* +.coverage.* /.nox/ /.python-version /.pytype/ diff --git a/spiffworkflow-backend/bin/delete_and_import_all_permissions.py b/spiffworkflow-backend/bin/delete_and_import_all_permissions.py index a55e36e7f..966ec5a11 100644 --- a/spiffworkflow-backend/bin/delete_and_import_all_permissions.py +++ b/spiffworkflow-backend/bin/delete_and_import_all_permissions.py @@ -7,7 +7,8 @@ def main() -> None: """Main.""" app = get_hacked_up_app_for_script() with app.app_context(): - AuthorizationService.delete_all_permissions_and_recreate() + AuthorizationService.delete_all_permissions() + AuthorizationService.import_permissions_from_yaml_file() if __name__ == "__main__": diff --git a/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance b/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance old mode 100755 new mode 100644 index 9b6b4c757..dbce01ecc --- a/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance +++ b/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance @@ -1,5 +1,4 @@ """Get the bpmn process json for a given process instance id and store it in /tmp.""" -#!/usr/bin/env python import os import sys @@ -18,15 +17,17 @@ def main(process_instance_id: str): id=process_instance_id ).first() + file_path = f"/tmp/{process_instance_id}_bpmn_json.json" if not process_instance: raise Exception( f"Could not find a process instance with id: {process_instance_id}" ) with open( - f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8" + file_path, "w", encoding="utf-8" ) as f: f.write(process_instance.bpmn_json) + print(f"Saved to {file_path}") if len(sys.argv) < 2: diff --git a/spiffworkflow-backend/bin/get_logs_from_docker_compose b/spiffworkflow-backend/bin/get_logs_from_docker_compose index 78c7684e3..d2c06c6f3 100755 --- a/spiffworkflow-backend/bin/get_logs_from_docker_compose +++ b/spiffworkflow-backend/bin/get_logs_from_docker_compose @@ -7,4 +7,5 @@ function error_handler() { trap 'error_handler ${LINENO} $?' ERR set -o errtrace -o errexit -o nounset -o pipefail -docker compose logs "$@" +# "docker compose logs" is only getting the db logs so specify them both +docker compose logs db spiffworkflow-backend diff --git a/spiffworkflow-backend/bin/get_perms b/spiffworkflow-backend/bin/get_perms new file mode 100755 index 000000000..5e0dbd6de --- /dev/null +++ b/spiffworkflow-backend/bin/get_perms @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +set -x +mysql -uroot spiffworkflow_backend_development -e 'select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa join principal p on p.id = pa.principal_id join `group` g on g.id = p.group_id join permission_target pt on pt.id = pa.permission_target_id;' diff --git a/spiffworkflow-backend/bin/get_routes b/spiffworkflow-backend/bin/get_routes new file mode 100755 index 000000000..63f194ef0 --- /dev/null +++ b/spiffworkflow-backend/bin/get_routes @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +grep -E '^ +\/' src/spiffworkflow_backend/api.yml | sort diff --git a/spiffworkflow-backend/bin/import_tickets_for_command_line.py b/spiffworkflow-backend/bin/import_tickets_for_command_line.py index e193b5990..cc94ba545 100644 --- a/spiffworkflow-backend/bin/import_tickets_for_command_line.py +++ b/spiffworkflow-backend/bin/import_tickets_for_command_line.py @@ -27,7 +27,6 @@ def main(): """Main.""" app = get_hacked_up_app_for_script() with app.app_context(): - process_model_identifier_ticket = "ticket" db.session.query(ProcessInstanceModel).filter( ProcessInstanceModel.process_model_identifier diff --git a/spiffworkflow-backend/bin/keycloak_test_server.py b/spiffworkflow-backend/bin/keycloak_test_server.py index 59efd36c5..3e9334938 100644 --- a/spiffworkflow-backend/bin/keycloak_test_server.py +++ b/spiffworkflow-backend/bin/keycloak_test_server.py @@ -40,7 +40,8 @@ def hello_world(): return ( 'Hello, %s, See private ' 'Log out' - ) % oidc.user_getfield("preferred_username") + % oidc.user_getfield("preferred_username") + ) else: return 'Welcome anonymous, Log in' diff --git a/spiffworkflow-backend/bin/recreate_db b/spiffworkflow-backend/bin/recreate_db index 5eb248fe0..ec38c7b39 100755 --- a/spiffworkflow-backend/bin/recreate_db +++ b/spiffworkflow-backend/bin/recreate_db @@ -61,3 +61,7 @@ for task in $tasks; do done SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade +if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! 
grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then + mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV" + FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade +fi diff --git a/spiffworkflow-backend/bin/spiffworkflow-realm.json b/spiffworkflow-backend/bin/spiffworkflow-realm.json index a30f53c14..e31942cf1 100644 --- a/spiffworkflow-backend/bin/spiffworkflow-realm.json +++ b/spiffworkflow-backend/bin/spiffworkflow-realm.json @@ -426,6 +426,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "admin@spiffworkflow.org", "credentials" : [ { "id" : "ef435043-ef0c-407a-af5b-ced13182a408", "type" : "password", @@ -446,6 +447,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "alex@sartography.com", "credentials" : [ { "id" : "81a61a3b-228d-42b3-b39a-f62d8e7f57ca", "type" : "password", @@ -465,6 +467,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "amir@status.im", "credentials" : [ { "id" : "e589f3ad-bf7b-4756-89f7-7894c03c2831", "type" : "password", @@ -484,6 +487,9 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "firstName" : "", + "lastName" : "", + "email" : "ciadmin1@spiffworkflow.org", "credentials" : [ { "id" : "111b5ea1-c2ab-470a-a16b-2373bc94de7a", "type" : "password", @@ -499,28 +505,6 @@ }, "notBefore" : 0, "groups" : [ ] - }, { - "id" : "56457e8f-47c6-4f9f-a72b-473dea5edfeb", - "createdTimestamp" : 1657139955336, - "username" : "ciuser1", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "credentials" : [ { - "id" : "762f36e9-47af-44da-8520-cf09d752497a", - "type" : "password", - "createdDate" : 1657139966468, - "secretData" : "{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "clientRoles" : { - "spiffworkflow-backend" : [ "uma_protection" ] - }, - "notBefore" : 0, - "groups" : [ ] }, { "id" : "d58b61cc-a77e-488f-a427-05f4e0572e20", "createdTimestamp" : 1669132945413, @@ -530,6 +514,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "core@status.im", "credentials" : [ { "id" : "ee80092b-8ee6-4699-8492-566e088b48f5", "type" : "password", @@ -550,6 +535,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "dan@sartography.com", "credentials" : [ { "id" : "d517c520-f500-4542-80e5-7144daef1e32", "type" : "password", @@ -569,6 +555,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "daniel@sartography.com", "credentials" : [ { "id" : "f240495c-265b-42fc-99db-46928580d07d", "type" : "password", @@ -588,6 +575,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "elizabeth@sartography.com", "credentials" : [ { "id" : "ae951ec8-9fc9-4f1b-b340-bbbe463ae5c2", "type" : "password", @@ -609,6 +597,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "fin@status.im", "credentials" : [ { "id" : "2379940c-98b4-481a-b629-0bd1a4e91acf", "type" : "password", @@ -631,6 +620,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "fin1@status.im", "credentials" : [ { "id" : "96216746-ff72-454e-8288-232428d10b42", "type" : 
"password", @@ -651,6 +641,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "finance_user1@status.im", "credentials" : [ { "id" : "f14722ec-13a7-4d35-a4ec-0475d405ae58", "type" : "password", @@ -670,6 +661,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "harmeet@status.im", "credentials" : [ { "id" : "89c26090-9bd3-46ac-b038-883d02e3f125", "type" : "password", @@ -691,6 +683,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "j@status.im", "credentials" : [ { "id" : "e71ec785-9133-4b7d-8015-1978379af0bb", "type" : "password", @@ -711,6 +704,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jakub@status.im", "credentials" : [ { "id" : "ce141fa5-b8d5-4bbe-93e7-22e7119f97c2", "type" : "password", @@ -730,6 +724,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jarrad@status.im", "credentials" : [ { "id" : "113e0343-1069-476d-83f9-21d98edb9cfa", "type" : "password", @@ -749,6 +744,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jason@sartography.com", "credentials" : [ { "id" : "40abf32e-f0cc-4a17-8231-1a69a02c1b0b", "type" : "password", @@ -768,6 +764,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jon@sartography.com", "credentials" : [ { "id" : "8b520e01-5b9b-44ab-9ee8-505bd0831a45", "type" : "password", @@ -787,6 +784,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "kb@sartography.com", "credentials" : [ { "id" : "2c0be363-038f-48f1-86d6-91fdd28657cf", "type" : "password", @@ -808,6 +806,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "lead@status.im", "credentials" : [ { "id" : "96e836a4-1a84-45c5-a9ed-651b0c90195e", "type" : "password", @@ -830,6 +829,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "lead1@status.im", "credentials" : [ { "id" : "4e17388b-6c44-44e1-b20a-a873c0feb9a8", "type" : "password", @@ -850,6 +850,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "manuchehr@status.im", "credentials" : [ { "id" : "07dabf55-b5d3-4f98-abba-3334086ecf5e", "type" : "password", @@ -869,6 +870,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "mike@sartography.com", "credentials" : [ { "id" : "1ed375fb-0f1a-4c2a-9243-2477242cf7bd", "type" : "password", @@ -887,7 +889,10 @@ "username" : "natalia", "enabled" : true, "totp" : false, - "emailVerified" : false, + "emailVerified" : true, + "firstName" : "", + "lastName" : "", + "email" : "natalia@sartography.com", "credentials" : [ { "id" : "b6aa9936-39cc-4931-bfeb-60e6753de5ba", "type" : "password", @@ -907,6 +912,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "sasha@status.im", "credentials" : [ { "id" : "4a170af4-6f0c-4e7b-b70c-e674edf619df", "type" : "password", @@ -926,6 +932,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "service-account@status.im", "serviceAccountClientId" : "spiffworkflow-backend", "credentials" : [ ], "disableableCredentialTypes" : [ ], @@ -943,6 +950,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "service-account-withauth@status.im", "serviceAccountClientId" : "withAuth", "credentials" : [ ], "disableableCredentialTypes" : [ ], @@ -2166,7 +2174,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", 
"oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2184,7 +2192,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2274,7 +2282,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3", + "id" : "76ae522e-7ab3-48dc-af76-9cb8069368a2", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2296,7 +2304,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4da99e29-371e-4f4b-a863-e5079f30a714", + "id" : "ddf80243-ec40-4c21-ae94-2967d841f84c", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2325,7 +2333,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d398c928-e201-4e8b-ab09-289bb351cd2e", + "id" : "4f075680-46b7-49eb-b94c-d7425f105cb9", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2347,7 +2355,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d", + "id" : "a0467c77-c3dc-4df6-acd2-c05ca13601ed", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2369,7 +2377,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "98013bc1-e4dd-41f7-9849-1f898143b944", + "id" : "07536fec-8d41-4c73-845f-ca85002022e0", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2391,7 +2399,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2", + "id" : "f123f912-71fb-4596-97f9-c0628a59413d", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2413,7 +2421,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2470e6f4-9a01-476a-9057-75d78e577182", + "id" : "03c26cc5-366b-462d-9297-b4016f8d7c57", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2435,7 +2443,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9", + "id" : "1b4f474e-aa64-45cc-90f1-63504585d89c", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2458,7 +2466,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "97c83e43-cba8-4d92-b108-9181bca07a1e", + "id" : "38024dd6-daff-45de-8782-06b07b7bfa56", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2480,7 +2488,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "fbabd64c-20de-4b8c-bfd2-be6822572278", + "id" : "b7e30fca-e4ac-4886-a2e7-642fe2a27ee7", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -2516,7 +2524,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0628a99f-b194-495d-8e54-cc4ca8684956", + "id" : "92e3571d-ac3e-4e79-a391-5315954e866f", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -2552,7 +2560,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a", + "id" : "5093dd2d-fe5d-4f41-a54d-03cd648d9b7f", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -2581,7 +2589,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa", + "id" : "95d2f1ff-6907-47ce-a93c-db462fe04844", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -2596,7 +2604,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9", + "id" : "27405ee8-5730-419c-944c-a7c67edd91ce", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -2619,7 +2627,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9", + "id" : "fce6d926-3a99-40ee-b79e-cae84493dbd8", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -2641,7 +2649,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a", + "id" : "75d93596-b7fb-4a2c-a780-e6a038e66fe9", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -2663,7 +2671,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03", + "id" : "04cdc1ac-c58d-4f8c-bc10-7d5e2bb99485", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -2679,7 +2687,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3", + "id" : "99593c1e-f2a5-4198-ad41-634694259110", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -2715,7 +2723,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828", + "id" : "7d53f026-b05e-4a9c-aba6-23b17826a4d4", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -2751,7 +2759,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"824fe757-cc5c-4e13-ab98-9a2132e10f5c", + "id" : "7ca17e64-f916-4d6c-91f0-815ec66f50e8", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -2767,13 +2775,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "817a93da-29df-447f-ab05-cd9557e66745", + "id" : "9b71d817-b999-479d-97f8-07e39dd9e9fa", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878", + "id" : "f9f13ba1-6a17-436b-a80b-6ccc042f9fc2", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/migrations/versions/b99a4cb94b5b_.py b/spiffworkflow-backend/migrations/versions/907bcf0c3d75_.py similarity index 96% rename from spiffworkflow-backend/migrations/versions/b99a4cb94b5b_.py rename to spiffworkflow-backend/migrations/versions/907bcf0c3d75_.py index ec3592540..552afe485 100644 --- a/spiffworkflow-backend/migrations/versions/b99a4cb94b5b_.py +++ b/spiffworkflow-backend/migrations/versions/907bcf0c3d75_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: b99a4cb94b5b +Revision ID: 907bcf0c3d75 Revises: -Create Date: 2022-12-20 10:45:08.295317 +Create Date: 2022-12-28 13:52:13.030028 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = 'b99a4cb94b5b' +revision = '907bcf0c3d75' down_revision = None branch_labels = None depends_on = None @@ -72,16 +72,15 @@ def upgrade(): op.create_table('user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(length=255), nullable=False), - sa.Column('uid', sa.String(length=50), nullable=True), - sa.Column('service', sa.String(length=50), nullable=False), + sa.Column('service', sa.String(length=255), nullable=False), sa.Column('service_id', sa.String(length=255), nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), + sa.Column('display_name', sa.String(length=255), nullable=True), sa.Column('email', sa.String(length=255), nullable=True), sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('service', 'service_id', name='service_key'), - sa.UniqueConstraint('uid') + sa.UniqueConstraint('username') ) op.create_table('message_correlation_property', sa.Column('id', sa.Integer(), nullable=False), @@ -176,6 +175,14 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique') ) + op.create_table('user_group_assignment_waiting', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique') + ) op.create_table('human_task', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -309,6 +316,7 @@ def downgrade(): op.drop_table('message_correlation') op.drop_index(op.f('ix_human_task_completed'), table_name='human_task') op.drop_table('human_task') + op.drop_table('user_group_assignment_waiting') op.drop_table('user_group_assignment') op.drop_table('secret') op.drop_table('refresh_token') diff --git 
a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 5bcb2d0f4..707c5b3c3 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -654,7 +654,7 @@ werkzeug = "*" type = "git" url = "https://github.com/sartography/flask-bpmn" reference = "main" -resolved_reference = "0f2d249d0e799bec912d46132e9ef9754fdacbd7" +resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b" [[package]] name = "Flask-Cors" @@ -1851,7 +1851,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994" +resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c" [[package]] name = "SQLAlchemy" @@ -2563,6 +2563,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2571,6 +2572,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2579,6 +2581,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, @@ -2877,10 +2880,7 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, - {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, @@ -2989,7 +2989,18 @@ psycopg2 = [ {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"}, ] pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", 
hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] pycodestyle = [ diff --git a/spiffworkflow-backend/src/.coverage.jason-Gazelle.473795.719220 b/spiffworkflow-backend/src/.coverage.jason-Gazelle.473795.719220 new file mode 100644 index 000000000..3c5fc7087 Binary files /dev/null and b/spiffworkflow-backend/src/.coverage.jason-Gazelle.473795.719220 differ diff --git a/spiffworkflow-backend/src/.coverage.jason-Gazelle.475245.497833 b/spiffworkflow-backend/src/.coverage.jason-Gazelle.475245.497833 new file mode 100644 index 000000000..214df28dc Binary files /dev/null and b/spiffworkflow-backend/src/.coverage.jason-Gazelle.475245.497833 differ diff --git a/spiffworkflow-backend/src/.coverage.jason-Gazelle.476451.578823 b/spiffworkflow-backend/src/.coverage.jason-Gazelle.476451.578823 new file mode 100644 index 000000000..ef7f5c499 Binary files /dev/null and b/spiffworkflow-backend/src/.coverage.jason-Gazelle.476451.578823 differ diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 9599116a2..f1de793d4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -18,11 +18,11 @@ from werkzeug.exceptions import NotFound import spiffworkflow_backend.load_database_models # noqa: F401 from spiffworkflow_backend.config import setup_config +from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import ( openid_blueprint, ) -from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.routes.user_blueprint import user_blueprint from spiffworkflow_backend.services.authorization_service import AuthorizationService @@ -93,7 +93,8 @@ def create_app() -> flask.app.Flask: if os.environ.get("FLASK_SESSION_SECRET_KEY") is None: raise KeyError( - "Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY" + "Cannot find the secret_key from the environment. 
Please set" + " FLASK_SESSION_SECRET_KEY" ) app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY") @@ -103,7 +104,6 @@ def create_app() -> flask.app.Flask: migrate.init_app(app, db) app.register_blueprint(user_blueprint) - app.register_blueprint(process_api_blueprint) app.register_blueprint(api_error_blueprint) app.register_blueprint(admin_blueprint, url_prefix="/admin") app.register_blueprint(openid_blueprint, url_prefix="/openid") @@ -117,7 +117,7 @@ def create_app() -> flask.app.Flask: ] CORS(app, origins=origins_re, max_age=3600) - connexion_app.add_api("api.yml", base_path="/v1.0") + connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX) mail = Mail(app) app.config["MAIL_APP"] = mail diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 12b40160e..d96de3dbc 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -8,10 +8,6 @@ servers: - url: http://localhost:5000/v1.0 # this is handled in flask now security: [] -# - jwt: ["secret"] -# - oAuth2AuthCode: -# - read_email -# - uid paths: /login: @@ -22,7 +18,6 @@ paths: schema: type: string get: - security: [] summary: redirect to open id authentication server operationId: spiffworkflow_backend.routes.user.login tags: @@ -48,7 +43,6 @@ paths: schema: type: string get: - security: [] operationId: spiffworkflow_backend.routes.user.login_return tags: - Authentication @@ -68,7 +62,6 @@ paths: schema: type: string get: - security: [] operationId: spiffworkflow_backend.routes.user.logout summary: Logout authenticated user tags: @@ -78,7 +71,6 @@ paths: description: Logout Authenticated User /logout_return: get: - security: [] operationId: spiffworkflow_backend.routes.user.logout_return summary: Logout authenticated user tags: @@ -89,7 +81,6 @@ paths: /login_api: get: - security: [] operationId: spiffworkflow_backend.routes.user.login_api summary: Authenticate user for API access tags: @@ -115,7 +106,6 @@ paths: schema: type: string get: - security: [] operationId: spiffworkflow_backend.routes.user.login_api_return tags: - Authentication @@ -125,8 +115,7 @@ paths: /status: get: - security: [] - operationId: spiffworkflow_backend.routes.process_api_blueprint.status + operationId: spiffworkflow_backend.routes.health_controller.status summary: Returns 200 if the server is Responding tags: - Liveness @@ -160,7 +149,7 @@ paths: schema: type: integer get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_list + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_list summary: get list tags: - Process Groups @@ -174,7 +163,7 @@ paths: items: $ref: "#/components/schemas/ProcessModelCategory" post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_add + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_create summary: Add process group tags: - Process Groups @@ -201,7 +190,7 @@ paths: type: string # process_group_show get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_show + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_show summary: Returns a single process group tags: - Process Groups @@ -213,7 +202,7 @@ paths: schema: $ref: "#/components/schemas/ProcessModelCategory" delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_delete + operationId: 
spiffworkflow_backend.routes.process_groups_controller.process_group_delete summary: Deletes a single process group tags: - Process Groups @@ -221,7 +210,7 @@ paths: "200": description: The process group was deleted. put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_update + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_update summary: Updates a single process group tags: - Process Groups @@ -253,7 +242,7 @@ paths: schema: type: string put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_move + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_move summary: returns the new group tags: - Process Groups @@ -285,6 +274,12 @@ paths: description: Get only the process models that the user can run schema: type: boolean + - name: include_parent_groups + in: query + required: false + description: Get the display names for the parent groups as well + schema: + type: boolean - name: page in: query required: false @@ -298,7 +293,7 @@ paths: schema: type: integer get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_list + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_list summary: Return a list of process models for a given process group tags: - Process Models @@ -321,7 +316,33 @@ paths: schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_create + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_create + summary: Creates a new process model with the given parameters. + tags: + - Process Models + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + responses: + "201": + description: Process model created successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + + /process-models-natural-language/{modified_process_group_id}: + parameters: + - name: modified_process_group_id + in: path + required: true + description: modified id of an existing process group + schema: + type: string + post: + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_create_with_natural_language summary: Creates a new process model with the given parameters. tags: - Process Models @@ -347,7 +368,7 @@ paths: schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_create summary: Add a new workflow spec file tags: - Process Model Files @@ -377,7 +398,7 @@ paths: schema: type: string get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_show summary: Returns a single process model tags: - Process Models @@ -389,7 +410,7 @@ paths: schema: $ref: "#/components/schemas/ProcessModel" put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_update summary: Modifies an existing process model with the given parameters. 
@@ -347,7 +368,7 @@ paths:
         schema:
           type: string
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_create
       summary: Add a new workflow spec file
       tags:
         - Process Model Files
@@ -377,7 +398,7 @@ paths:
         schema:
           type: string
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_show
       summary: Returns a single process model
       tags:
         - Process Models
@@ -389,7 +410,7 @@ paths:
           schema:
             $ref: "#/components/schemas/ProcessModel"
     put:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_update
       summary: Modifies an existing process model with the given parameters.
       tags:
         - Process Models
@@ -406,7 +427,7 @@ paths:
           schema:
             $ref: "#/components/schemas/ProcessModel"
     delete:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_delete
       summary: Removes an existing process model
       tags:
         - Process Models
@@ -433,7 +454,7 @@ paths:
         schema:
           type: string
     put:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_move
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_move
       summary: returns the new model
       tags:
         - Process Models
@@ -460,7 +481,7 @@ paths:
         schema:
           type: string
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_publish
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_publish
       summary: Merge changes from this model to another branch.
       tags:
         - Process Models
@@ -608,7 +629,7 @@ paths:
         schema:
           type: string
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list_for_me
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list_for_me
       summary: Returns a list of process instances that are associated with me.
       tags:
         - Process Instances
@@ -721,7 +742,7 @@ paths:
         schema:
           type: string
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list
       summary: Returns a list of process instances.
       tags:
         - Process Instances
@@ -744,7 +765,7 @@ paths:
         schema:
           type: string
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_create
+      operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_create
       summary: Create script unit test based on given criteria
       tags:
         - Script Unit Test
@@ -765,7 +786,7 @@ paths:
         schema:
           type: string
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_run
+      operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_run
       summary: Run a given script unit test.
       tags:
         - Script Unit Test
@@ -786,7 +807,7 @@ paths:
         schema:
           type: string
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_create
       summary: Creates a process instance from a process model and returns the instance
       tags:
         - Process Instances
@@ -833,7 +854,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data_for_me
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data_for_me
       summary: returns the list of all user tasks associated with process instance without the task data
       responses:
         "200":
@@ -880,7 +901,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data
       summary: returns the list of all user tasks associated with process instance without the task data
       responses:
         "200":
@@ -915,7 +936,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show_for_me
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show_for_me
       summary: Show information about a process instance that is associated with me
       responses:
         "200":
@@ -948,7 +969,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show
       summary: Show information about a process instance
       responses:
         "200":
@@ -958,7 +979,7 @@
           schema:
             $ref: "#/components/schemas/Workflow"
     delete:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_delete
       summary: Deletes a single process instance
       tags:
         - Process Instances
@@ -985,7 +1006,7 @@ paths:
         schema:
           type: boolean
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_run
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_run
      summary: Run a process instance
       tags:
         - Process Instances
@@ -1006,7 +1027,7 @@ paths:
         schema:
           type: integer
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_terminate
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_terminate
       summary: Terminate a process instance
       tags:
         - Process Instances
@@ -1027,7 +1048,7 @@ paths:
         schema:
           type: integer
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_suspend
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_suspend
       summary: Suspend a process instance
       tags:
         - Process Instances
@@ -1048,7 +1069,7 @@ paths:
         schema:
           type: integer
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_resume
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_resume
       summary: Resume a process instance
       tags:
         - Process Instances
@@ -1060,6 +1081,39 @@ paths:
             schema:
               $ref: "#/components/schemas/OkTrue"
 
+  /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified process model id
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: spiff_step
+        in: path
+        required: true
+        description: Reset the process to this state
+        schema:
+          type: integer
+    post:
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset
+      summary: Reset a process instance to an earlier step
+      tags:
+        - Process Instances
+      responses:
+        "200":
+          description: Empty ok true response on successful reset.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/OkTrue"
+
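[Editor's note: a hedged client-side sketch of the new reset endpoint. The base URL, token handling, and the model/instance/step identifiers are all invented for the example; only the URL shape comes from the api.yml entry above.]

```python
# Illustrative call to the new process-instance-reset endpoint.
import requests

base_url = "http://localhost:8000/v1.0"  # assumed local backend URL
headers = {"Authorization": "Bearer <access_token>"}  # placeholder token

# Reset instance 42 of a hypothetical misc:test:my-model process model
# back to spiff step 3 (modified ids use ":" in place of "/"):
response = requests.post(
    f"{base_url}/process-instance-reset/misc:test:my-model/42/3",
    headers=headers,
)
response.raise_for_status()
print(response.json())  # per the OkTrue schema, expected: {"ok": true}
```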
   /process-instances/reports:
     parameters:
       - name: page
@@ -1075,7 +1129,7 @@ paths:
         schema:
           type: integer
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_list
       summary: Returns all process instance reports for process model
       tags:
         - Process Instances
@@ -1089,7 +1143,7 @@ paths:
             items:
               $ref: "#/components/schemas/Workflow"
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_create
       summary: Creates a process instance report
       tags:
         - Process Instances
@@ -1103,7 +1157,7 @@ paths:
 
   /process-instances/reports/columns:
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_column_list
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list
       summary: Returns all available columns for a process instance report.
       tags:
         - Process Instances
@@ -1138,7 +1192,7 @@ paths:
         schema:
           type: integer
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_show
       summary: Returns a report of process instances for a given process model
       tags:
         - Process Instances
@@ -1152,7 +1206,7 @@ paths:
             items:
               $ref: "#/components/schemas/Workflow"
     put:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_update
       summary: Updates a process instance report
       tags:
         - Process Instances
@@ -1164,7 +1218,7 @@ paths:
           schema:
             $ref: "#/components/schemas/OkTrue"
     delete:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_delete
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_delete
       summary: Delete a process instance report
       tags:
         - Process Instances
@@ -1191,7 +1245,7 @@ paths:
         schema:
           type: string
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_show
       summary: Returns metadata about the file
       tags:
         - Process Model Files
@@ -1203,7 +1257,7 @@ paths:
           schema:
             $ref: "#/components/schemas/File"
     put:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_update
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_update
       summary: save the contents to the given file
       tags:
         - Process Model Files
@@ -1226,7 +1280,7 @@ paths:
           schema:
             $ref: "#/components/schemas/OkTrue"
     delete:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_delete
+      operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_delete
       summary: Removes an existing process model file
       tags:
         - Process Model Files
@@ -1255,8 +1309,7 @@ paths:
     get:
       tags:
         - Tasks
-      # security: []
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_my_tasks
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_list_my_tasks
       summary: returns the list of ready or waiting tasks for a user
       responses:
         "200":
@@ -1285,7 +1338,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_open_processes
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_open_processes
       summary: returns the list of tasks for given user's open process instances
       responses:
         "200":
@@ -1314,7 +1367,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_me
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_me
       summary: returns the list of tasks for given user's open process instances
       responses:
         "200":
@@ -1349,7 +1402,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_groups
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_groups
       summary: returns the list of tasks for given user's open process instances
       responses:
         "200":
@@ -1361,11 +1414,34 @@ paths:
             items:
               $ref: "#/components/schemas/Task"
 
+  /users/search:
+    parameters:
+      - name: username_prefix
+        in: query
+        required: true
+        description: The username prefix to search for
+        schema:
+          type: string
+    get:
+      tags:
+        - Users
+      operationId: spiffworkflow_backend.routes.users_controller.user_search
+      summary: Returns a list of users that match the search param
+      responses:
+        "200":
+          description: list of users
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/User"
+
   /user-groups/for-current-user:
     get:
       tags:
-        - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.user_group_list_for_current_user
+        - User Groups
+      operationId: spiffworkflow_backend.routes.users_controller.user_group_list_for_current_user
       summary: Group identifiers for the currently logged-in user
       responses:
         "200":
@@ -1406,7 +1482,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_with_task_data
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_with_task_data
       summary: returns the list of all user tasks associated with process instance with the task data
       responses:
         "200":
@@ -1439,7 +1515,7 @@ paths:
         schema:
           type: string
     put:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update
       summary: Update the task data for requested instance and task
       tags:
         - Process Instances
@@ -1451,11 +1527,104 @@ paths:
           schema:
             $ref: "#/components/schemas/Workflow"
 
+  /process-data/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: process_data_identifier
+        in: path
+        required: true
+        description: The identifier of the process data.
+        schema:
+          type: string
+    get:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_show
+      summary: Fetch the process data value.
+      tags:
+        - Data Objects
+      responses:
+        "200":
+          description: Fetch succeeded.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
+  /send-event/{modified_process_model_identifier}/{process_instance_id}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of the process instance
+        schema:
+          type: string
+    post:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.send_bpmn_event
+      summary: Send a BPMN event to the process
+      tags:
+        - Process Instances
+      responses:
+        "200":
+          description: Event Sent Successfully
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
+  /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of the process instance
+        schema:
+          type: string
+      - name: task_id
+        in: path
+        required: true
+        description: The unique id of the task.
+        schema:
+          type: string
+    post:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task
+      summary: Mark a task complete without executing it
+      tags:
+        - Process Instances
+      responses:
+        "200":
+          description: Task marked complete
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
   /service-tasks:
     get:
       tags:
         - Service Tasks
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.service_task_list
+      operationId: spiffworkflow_backend.routes.service_tasks_controller.service_task_list
       summary: Gets all available service task connectors
       responses:
         "200":
@@ -1469,7 +1638,7 @@ paths:
     get:
       tags:
         - Authentications
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_list
+      operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_list
       summary: Gets all available authentications from connector proxy
       responses:
         "200":
@@ -1506,11 +1675,9 @@ paths:
         schema:
           type: string
     get:
-      # disable security so we can get the token from query params instead
-      security: []
       tags:
         - Authentications
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_callback
+      operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_callback
       summary: Callback to backend
       responses:
         "200":
@@ -1543,7 +1710,7 @@ paths:
     get:
       tags:
         - Tasks
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_show
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_show
       summary: Gets one task that a user wants to complete
       responses:
         "200":
@@ -1555,7 +1722,7 @@ paths:
     put:
       tags:
         - Tasks
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_submit
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_submit
       summary: Update the form data for a task
       requestBody:
         content:
@@ -1599,7 +1766,7 @@ paths:
     get:
       tags:
         - Messages
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.message_instance_list
+      operationId: spiffworkflow_backend.routes.messages_controller.message_instance_list
       summary: Get a list of message instances
       responses:
         "200":
@@ -1620,7 +1787,7 @@ paths:
     post:
       tags:
         - Messages
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.message_start
+      operationId: spiffworkflow_backend.routes.messages_controller.message_start
       summary: Instantiate and run a given process model with a message start event matching given identifier
       requestBody:
         content:
@@ -1664,7 +1831,7 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_log_list
+      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_log_list
       summary: returns a list of logs associated with the process instance
       responses:
         "200":
@@ -1689,7 +1856,7 @@ paths:
         schema:
           type: integer
     post:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.add_secret
+      operationId: spiffworkflow_backend.routes.secrets_controller.secret_create
       summary: Create a secret for a key and value
       tags:
         - Secrets
@@ -1706,7 +1873,7 @@ paths:
         schema:
           type: number
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_list
+      operationId: spiffworkflow_backend.routes.secrets_controller.secret_list
       summary: Return list of all secrets
       tags:
         - Secrets
@@ -1727,7 +1894,7 @@ paths:
         schema:
           type: string
     get:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.get_secret
+      operationId: spiffworkflow_backend.routes.secrets_controller.secret_show
       summary: Return a secret value for a key
       tags:
         - Secrets
@@ -1739,7 +1906,7 @@
           schema:
             $ref: "#/components/schemas/Secret"
     delete:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_secret
+      operationId: spiffworkflow_backend.routes.secrets_controller.secret_delete
       summary: Delete an existing secret
       tags:
         - Secrets
@@ -1751,7 +1918,7 @@
         "404":
           description: Secret does not exist
     put:
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.update_secret
+      operationId: spiffworkflow_backend.routes.secrets_controller.secret_update
       summary: Modify an existing secret
       tags:
         - Secrets
@@ -1810,16 +1977,6 @@ components:
         scopes:
           read_email: read email
       x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope
-    # oAuth2AuthCode:
-    #   type: oauth2
-    #   description: authenticate with openid server
-    #   flows:
-    #     implicit:
-    #       authorizationUrl: /v1.0/login_api
-    #       scopes:
-    #         uid: uid
-    #       x-tokenInfoUrl: localhost:7000/v1.0/login_api_return
-    #       x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope
 
   schemas:
     OkTrue:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py
index 106b07357..fb5901f03 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py
@@ -17,21 +17,21 @@ def setup_database_uri(app: Flask) -> None:
     if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
         database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
         if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+            )
         elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+            )
         else:
             # use pswd to trick flake8 with hardcoded passwords
             db_pswd = os.environ.get("DB_PASSWORD")
             if db_pswd is None:
                 db_pswd = ""
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+            )
     else:
         app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
             "SPIFFWORKFLOW_BACKEND_DATABASE_URI"
@@ -42,6 +42,7 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
     """Load_config_file."""
     try:
         app.config.from_object(env_config_module)
+        print(f"loaded config: {env_config_module}")
     except ImportStringError as exception:
         if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
             raise ModuleNotFoundError(
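[Editor's note: a minimal sketch of the config layering that setup_config implements, assuming the spiffworkflow_backend package is importable. Each from_object call overwrites keys set by the previous one, so the env-specific module (and, last of all, the unversioned secrets module) wins over the defaults. Not the project's actual code.]

```python
# Illustration of Flask config layering via repeated from_object calls.
from flask import Flask

app = Flask(__name__)
app.config.from_object("spiffworkflow_backend.config.default")  # baseline
# An env-specific module, e.g. the dev module, which (as shown just below)
# pins SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME to "dev.yml":
app.config.from_object("spiffworkflow_backend.config.dev")
print(app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"])  # "dev.yml"
```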
@@ -62,6 +63,7 @@ def setup_config(app: Flask) -> None:
     )
     app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
     app.config.from_object("spiffworkflow_backend.config.default")
+    print("loaded config: default")
 
     env_config_prefix = "spiffworkflow_backend.config."
     if (
@@ -69,6 +71,7 @@ def setup_config(app: Flask) -> None:
         and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
     ):
         load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
+        print("loaded config: terraform_deployed_environment")
     env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
     load_config_file(app, env_config_module)
@@ -87,6 +90,14 @@ def setup_config(app: Flask) -> None:
         "permissions",
         app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
     )
+    print(
+        "set permissions file name config:"
+        f" {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
+    )
+    print(
+        "set permissions file name full path:"
+        f" {app.config['PERMISSIONS_FILE_FULLPATH']}"
+    )
 
     # unversioned (see .gitignore) config that can override everything and include secrets.
     # src/spiffworkflow_backend/config/secrets.py
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py
index ce6b516c0..cbbc269a8 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py
@@ -6,3 +6,4 @@ GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-commit
 GIT_USER_EMAIL = environ.get(
     "GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
 )
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml"
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml
index 65ba240ab..29d3c9c04 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml
@@ -1,13 +1,10 @@
 groups:
   admin:
-    users: [ciadmin1]
-
-  common-user:
-    users: [ciuser1]
+    users: [ciadmin1@spiffworkflow.org]
 
 permissions:
   admin:
-    groups: [admin, common-user]
+    groups: [admin]
     users: []
     allowed_permissions: [create, read, update, delete]
     uri: /*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml
new file mode 100644
index 000000000..a556c0139
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml
@@ -0,0 +1,151 @@
+default_group: everybody
+
+groups:
+  admin:
+    users:
+      [
+        admin@spiffworkflow.org,
+        jakub@status.im,
+        jarrad@status.im,
+        kb@sartography.com,
+        alex@sartography.com,
+        dan@sartography.com,
+        mike@sartography.com,
+        jason@sartography.com,
+        j@sartography.com,
+        elizabeth@sartography.com,
+        jon@sartography.com,
+      ]
+
+  Finance Team:
+    users:
+      [
+        jakub@status.im,
+        amir@status.im,
+        jarrad@status.im,
+        sasha@status.im,
+        fin@status.im,
+        fin1@status.im,
+        alex@sartography.com,
+        dan@sartography.com,
+        mike@sartography.com,
+        jason@sartography.com,
+        j@sartography.com,
+        elizabeth@sartography.com,
+        jon@sartography.com,
+      ]
+
+  demo:
+    users:
+      [
+        harmeet@status.im,
+        sasha@status.im,
+        manuchehr@status.im,
+        core@status.im,
+        fin@status.im,
+        fin1@status.im,
+        lead@status.im,
+        lead1@status.im,
+      ]
+
+  test:
+    users:
+      [
+        natalia@sartography.com,
+      ]
+
+permissions:
+  admin:
+    groups: [admin]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /*
+
+  # open system defaults for everybody
+  read-all-process-groups:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-groups/*
+  read-all-process-models:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-models/*
+
+  # basic perms for everybody
+  read-all-process-instances-for-me:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/*
+  read-process-instance-reports:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /process-instances/reports/*
+  processes-read:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /processes
+  service-tasks:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /service-tasks
+  tasks-crud:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /tasks/*
+  user-groups-for-current-user:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /user-groups/for-current-user
+
+
+  finance-admin:
+    groups: ["Finance Team"]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /process-groups/manage-procurement:procurement:*
+
+  manage-revenue-streams-instances:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [create]
+    uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+  manage-procurement-invoice-instances:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [create]
+    uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
+  manage-procurement-instances:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [create]
+    uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
+
+  manage-revenue-streams-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+  manage-procurement-invoice-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
+  manage-procurement-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
+
+  create-test-instances:
+    groups: ["test"]
+    users: []
+    allowed_permissions: [create, read]
+    uri: /process-instances/misc:test:*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml
index e334cc98b..ee40f839b 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml
@@ -10,72 +10,68 @@ groups:
   admin:
     users:
       [
-        admin,
-        jakub,
-        kb,
-        alex,
-        dan,
-        mike,
-        jason,
-        jarrad,
-        elizabeth,
-        jon,
+        admin@spiffworkflow.org,
+        jakub@status.im,
+        jarrad@status.im,
+        kb@sartography.com,
+        alex@sartography.com,
+        dan@sartography.com,
+        mike@sartography.com,
+        jason@sartography.com,
+        j@sartography.com,
+        elizabeth@sartography.com,
+        jon@sartography.com,
       ]
 
   Finance Team:
     users:
       [
-        jakub,
-        alex,
-        dan,
-        mike,
-        jason,
-        amir,
-        jarrad,
-        elizabeth,
-        jon,
-        sasha,
-        fin,
-        fin1,
+        jakub@status.im,
+        amir@status.im,
+        jarrad@status.im,
+        sasha@status.im,
+        fin@status.im,
+        fin1@status.im,
+        alex@sartography.com,
+        dan@sartography.com,
+        mike@sartography.com,
+        jason@sartography.com,
+        j@sartography.com,
+        elizabeth@sartography.com,
+        jon@sartography.com,
       ]
 
   demo:
     users:
       [
-        core,
-        fin,
-        fin1,
-        harmeet,
-        jason,
-        sasha,
-        manuchehr,
-        lead,
-        lead1
+        harmeet@status.im,
+        sasha@status.im,
+        manuchehr@status.im,
+        core@status.im,
+        fin@status.im,
+        fin1@status.im,
+        lead@status.im,
+        lead1@status.im,
       ]
 
-  core-contributor:
+  test:
     users:
       [
-        core,
-        harmeet,
+        natalia@sartography.com,
      ]
 
   admin-ro:
     users:
       [
-        j,
+        j@sartography.com,
       ]
 
-  test:
-    users: [natalia]
-
 permissions:
   admin:
     groups: [admin]
     users: []
     allowed_permissions: [create, read, update, delete]
     uri: /*
-
   admin-readonly:
     groups: [admin-ro]
     users: []
@@ -85,121 +81,93 @@ permissions:
     groups: [admin-ro]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/*
+    uri: /process-instances/*
 
-  tasks-crud:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/tasks/*
-
-  service-tasks:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/service-tasks
-  user-groups-for-current-user:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/user-groups/for-current-user
-
-
-  # read all for everybody
+  # open system defaults for everybody
   read-all-process-groups:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/process-groups/*
+    uri: /process-groups/*
   read-all-process-models:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/process-models/*
+    uri: /process-models/*
+
+  # basic perms for everybody
   read-all-process-instances-for-me:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/process-instances/for-me/*
+    uri: /process-instances/for-me/*
   read-process-instance-reports:
     groups: [everybody]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/reports/*
+    uri: /process-instances/reports/*
   processes-read:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/processes
+    uri: /processes
+  service-tasks:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /service-tasks
+  tasks-crud:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /tasks/*
+  user-groups-for-current-user:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [read]
+    uri: /user-groups/for-current-user
 
-  manage-procurement-admin:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/manage-procurement:*
-  manage-procurement-admin-slash:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/manage-procurement/*
-  manage-procurement-admin-models:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-models/manage-procurement:*
-  manage-procurement-admin-models-slash:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-models/manage-procurement/*
-  manage-procurement-admin-instances:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/manage-procurement:*
-  manage-procurement-admin-instances-slash:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/manage-procurement/*
-
   finance-admin:
     groups: ["Finance Team"]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/manage-procurement:procurement:*
+    uri: /process-groups/manage-procurement:procurement:*
 
   manage-revenue-streams-instances:
-    groups: ["core-contributor", "demo"]
+    groups: ["demo"]
     users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
-
+    allowed_permissions: [create]
+    uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
   manage-procurement-invoice-instances:
-    groups: ["core-contributor", "demo"]
+    groups: ["demo"]
     users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
-
+    allowed_permissions: [create]
+    uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
   manage-procurement-instances:
-    groups: ["core-contributor", "demo"]
+    groups: ["demo"]
     users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
+    allowed_permissions: [create]
+    uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
+
+  manage-revenue-streams-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+  manage-procurement-invoice-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
+  manage-procurement-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
 
   create-test-instances:
     groups: ["test"]
     users: []
     allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/misc:test:*
-
-  core1-admin-instances:
-    groups: ["core-contributor", "Finance Team"]
-    users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form:*
-  core1-admin-instances-slash:
-    groups: ["core-contributor", "Finance Team"]
-    users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
+    uri: /process-instances/misc:test:*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml
index 79bfed81d..248a400b4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml
@@ -2,14 +2,17 @@ default_group: everybody
 
 users:
   admin:
+    service: local_open_id
     email: admin@spiffworkflow.org
     password: admin
     preferred_username: Admin
   nelson:
+    service: local_open_id
     email: nelson@spiffworkflow.org
     password: nelson
     preferred_username: Nelson
   malala:
+    service: local_open_id
     email: malala@spiffworkflow.org
     password: malala
     preferred_username: Malala
@@ -18,17 +21,17 @@ groups:
   admin:
     users:
       [
-        admin,
+        admin@spiffworkflow.org,
       ]
   Education:
     users:
       [
-        malala
+        malala@spiffworkflow.org
       ]
   President:
     users:
       [
-        nelson
+        nelson@spiffworkflow.org
       ]
 
 permissions:
@@ -44,45 +47,44 @@ permissions:
     groups: [everybody]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/tasks/*
+    uri: /tasks/*
 
   # Everyone can see everything (all groups, and processes are visible)
   read-all-process-groups:
     groups: [ everybody ]
     users: [ ]
     allowed_permissions: [ read ]
-    uri: /v1.0/process-groups/*
+    uri: /process-groups/*
   read-all-process-models:
     groups: [ everybody ]
     users: [ ]
     allowed_permissions: [ read ]
-    uri: /v1.0/process-models/*
+    uri: /process-models/*
   read-all-process-instance:
     groups: [ everybody ]
     users: [ ]
     allowed_permissions: [ read ]
-    uri: /v1.0/process-instances/*
+    uri: /process-instances/*
   read-process-instance-reports:
     groups: [ everybody ]
     users: [ ]
     allowed_permissions: [ read ]
-    uri: /v1.0/process-instances/reports/*
+    uri: /process-instances/reports/*
   processes-read:
     groups: [ everybody ]
     users: [ ]
     allowed_permissions: [ read ]
-    uri: /v1.0/processes
-
-  # Members of the Education group can change they processes work.
+    uri: /processes
 
+  # Members of the Education group can change the processes under "education".
   education-admin:
     groups: ["Education", "President"]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/education:*
+    uri: /process-groups/education:*
 
   # Anyone can start an education process.
   education-everybody:
     groups: [everybody]
     users: []
     allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
+    uri: /process-instances/misc:category_number_one:process-model-with-form/*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml
new file mode 100644
index 000000000..049c991ed
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml
@@ -0,0 +1,12 @@
+default_group: everybody
+
+groups:
+  admin:
+    users: [admin@spiffworkflow.org]
+
+permissions:
+  admin:
+    groups: [admin]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml
index 20635ea2e..9816ca939 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml
@@ -4,57 +4,53 @@ groups:
   admin:
     users:
       [
-        admin,
-        jakub,
-        kb,
-        alex,
-        dan,
-        mike,
-        jason,
-        j,
-        jarrad,
-        elizabeth,
-        jon,
-        natalia,
+        admin@spiffworkflow.org,
+        jakub@status.im,
+        jarrad@status.im,
+        kb@sartography.com,
+        alex@sartography.com,
+        dan@sartography.com,
+        mike@sartography.com,
+        jason@sartography.com,
+        j@sartography.com,
+        elizabeth@sartography.com,
+        jon@sartography.com,
      ]
 
   Finance Team:
     users:
       [
-        jakub,
-        alex,
-        dan,
-        mike,
-        jason,
-        j,
-        amir,
-        jarrad,
-        elizabeth,
-        jon,
-        natalia,
-        sasha,
-        fin,
-        fin1,
+        jakub@status.im,
+        amir@status.im,
+        jarrad@status.im,
+        sasha@status.im,
+        fin@status.im,
+        fin1@status.im,
+        alex@sartography.com,
+        dan@sartography.com,
+        mike@sartography.com,
+        jason@sartography.com,
+        j@sartography.com,
+        elizabeth@sartography.com,
+        jon@sartography.com,
       ]
 
   demo:
     users:
       [
-        core,
-        fin,
-        fin1,
-        harmeet,
-        sasha,
-        manuchehr,
-        lead,
-        lead1
+        harmeet@status.im,
+        sasha@status.im,
+        manuchehr@status.im,
+        core@status.im,
+        fin@status.im,
+        fin1@status.im,
+        lead@status.im,
+        lead1@status.im,
       ]
 
-
-  core-contributor:
+  test:
     users:
       [
-        core,
-        harmeet,
+        natalia@sartography.com,
       ]
 
 permissions:
@@ -67,104 +63,86 @@ permissions:
     groups: [admin]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/*
+    uri: /process-instances/*
 
-  tasks-crud:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/tasks/*
-
-  service-tasks:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/service-tasks
-  user-groups-for-current-user:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/user-groups/for-current-user
-
-
-  # read all for everybody
+  # open system defaults for everybody
   read-all-process-groups:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/process-groups/*
+    uri: /process-groups/*
   read-all-process-models:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/process-models/*
+    uri: /process-models/*
+
+  # basic perms for everybody
   read-all-process-instances-for-me:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/process-instances/for-me/*
-  manage-process-instance-reports:
+    uri: /process-instances/for-me/*
+  read-process-instance-reports:
     groups: [everybody]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/reports/*
+    uri: /process-instances/reports/*
   processes-read:
     groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/processes
-
-
-  manage-procurement-admin-instances:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/manage-procurement:*
-  manage-procurement-admin-instances-slash:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/manage-procurement/*
-  manage-procurement-admin-instance-logs:
-    groups: ["Project Lead"]
+    uri: /processes
+  service-tasks:
+    groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/logs/manage-procurement:*
-  manage-procurement-admin-instance-logs-slash:
-    groups: ["Project Lead"]
+    uri: /service-tasks
+  tasks-crud:
+    groups: [everybody]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /tasks/*
+  user-groups-for-current-user:
+    groups: [everybody]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/logs/manage-procurement/*
+    uri: /user-groups/for-current-user
 
   manage-revenue-streams-instances:
-    groups: ["core-contributor", "demo"]
+    groups: ["demo"]
     users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
-  manage-revenue-streams-instance-logs:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/logs/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
-
+    allowed_permissions: [create]
+    uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
   manage-procurement-invoice-instances:
-    groups: ["core-contributor", "demo"]
+    groups: ["demo"]
     users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
-  manage-procurement-invoice-instance-logs:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/logs/manage-procurement:procurement:core-contributor-invoice-management:*
-
+    allowed_permissions: [create]
+    uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
   manage-procurement-instances:
-    groups: ["core-contributor", "demo"]
+    groups: ["demo"]
       users: []
-      allowed_permissions: [create, read]
-      uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
-  manage-procurement-instance-logs:
-    groups: ["core-contributor", "demo"]
+      allowed_permissions: [create]
+      uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
+
+  manage-revenue-streams-instances-for-me:
+    groups: ["demo"]
     users: []
     allowed_permissions: [read]
-    uri: /v1.0/logs/manage-procurement:vendor-lifecycle-management:*
+    uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+  manage-procurement-invoice-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
+  manage-procurement-instances-for-me:
+    groups: ["demo"]
+    users: []
+    allowed_permissions: [read]
+    uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
+
+  create-test-instances:
+    groups: ["test"]
+    users: []
+    allowed_permissions: [create, read]
+    uri: /process-instances/misc:test:*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml
index fc118b900..049c991ed 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml
@@ -2,60 +2,7 @@ default_group: everybody
 
 groups:
   admin:
-    users:
-      [
-        admin,
-        jakub,
-        kb,
-        alex,
-        dan,
-        mike,
-        jason,
-        j,
-        jarrad,
-        elizabeth,
-        jon,
-      ]
-
-  Finance Team:
-    users:
-      [
-        jakub,
-        alex,
-        dan,
-        mike,
-        jason,
-        j,
-        amir,
-        jarrad,
-        elizabeth,
-        jon,
-        sasha,
-        fin,
-        fin1,
-      ]
-
-  demo:
-    users:
-      [
-        core,
-        fin,
-        fin1,
-        harmeet,
-        sasha,
-        manuchehr,
-        lead,
-        lead1
-      ]
-
-  core-contributor:
-    users:
-      [
-        core,
-        harmeet,
-      ]
-
-  test:
-    users: [natalia]
+    users: [admin@spiffworkflow.org]
 
 permissions:
   admin:
@@ -63,110 +10,3 @@ permissions:
     users: []
     allowed_permissions: [create, read, update, delete]
     uri: /*
-
-  tasks-crud:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/tasks/*
-
-  service-tasks:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/service-tasks
-  user-groups-for-current-user:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/user-groups/for-current-user
-
-
-  # read all for everybody
-  read-all-process-groups:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/process-groups/*
-  read-all-process-models:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/process-models/*
-  read-all-process-instances-for-me:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/process-instances/for-me/*
-  read-process-instance-reports:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/reports/*
-  processes-read:
-    groups: [everybody]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/processes
-
-
-  manage-procurement-admin:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/manage-procurement:*
-  manage-procurement-admin-slash:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/manage-procurement/*
-  manage-procurement-admin-models:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-models/manage-procurement:*
-  manage-procurement-admin-models-slash:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-models/manage-procurement/*
-  manage-procurement-admin-instances:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/manage-procurement:*
-  manage-procurement-admin-instances-slash:
-    groups: ["Project Lead"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/manage-procurement/*
-
-  finance-admin:
-    groups: ["Finance Team"]
-    users: []
-    allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/manage-procurement:procurement:*
-
-  manage-revenue-streams-instances:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
-
-  manage-procurement-invoice-instances:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
-
-  manage-procurement-instances:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
-
-  create-test-instances:
-    groups: ["test"]
-    users: []
-    allowed_permissions: [create, read]
-    uri: /v1.0/process-instances/misc:test:*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml
index c678205df..79a137104 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml
@@ -1,5 +1,12 @@
 default_group: everybody
 
+users:
+  testadmin1:
+    service: https://testing/openid/thing
+    email: testadmin1@spiffworkflow.org
+    password: admin
+    preferred_username: El administrador de la muerte
+
 groups:
   admin:
     users: [testadmin1, testadmin2]
@@ -14,7 +21,7 @@ permissions:
   admin:
     groups: [admin]
     users: []
-    allowed_permissions: [create, read, update, delete, list, instantiate]
+    allowed_permissions: [create, read, update, delete]
     uri: /*
 
   read-all:
@@ -27,29 +34,29 @@ permissions:
     groups: [everybody]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/tasks/*
+    uri: /tasks/*
 
   # TODO: all uris should really have the same structure
   finance-admin-group:
     groups: ["Finance Team"]
     users: [testuser4]
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/finance/*
+    uri: /process-groups/finance/*
 
   finance-admin-model:
     groups: ["Finance Team"]
     users: [testuser4]
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-models/finance/*
+    uri: /process-models/finance/*
 
   finance-admin-model-lanes:
     groups: ["Finance Team"]
     users: [testuser4]
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-models/finance:model_with_lanes/*
+    uri: /process-models/finance:model_with_lanes/*
 
   finance-admin-instance-run:
     groups: ["Finance Team"]
     users: [testuser4]
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-instances/*
+    uri: /process-instances/*
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py
new file mode 100644
index 000000000..2f8ad5fca
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py
@@ -0,0 +1,11 @@
+"""Qa1."""
+from os import environ
+
+GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="qa2")
+GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
+GIT_USER_EMAIL = environ.get(
+    "GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
+)
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
+    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
+)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py
index 9cc247056..807163315 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py
@@ -1,7 +1,7 @@
 """Staging."""
 from os import environ
 
-GIT_BRANCH = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
+GIT_BRANCH = environ.get("GIT_BRANCH", default="staging")
 GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
 GIT_COMMIT_ON_SAVE = False
 SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/api_version.py b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/api_version.py
new file mode 100644
index 000000000..607b6c16b
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/api_version.py
@@ -0,0 +1,2 @@
+"""Api_version."""
+V1_API_PATH_PREFIX = "/v1.0"
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/interfaces.py b/spiffworkflow-backend/src/spiffworkflow_backend/interfaces.py
new file mode 100644
index 000000000..3d5280420
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/interfaces.py
@@ -0,0 +1,24 @@
+"""Interfaces."""
+from typing import NewType
+from typing import TYPE_CHECKING
+from typing import TypedDict
+
+if TYPE_CHECKING:
+    from spiffworkflow_backend.models.process_group import ProcessGroup
+
+
+IdToProcessGroupMapping = NewType("IdToProcessGroupMapping", dict[str, "ProcessGroup"])
+
+
+class ProcessGroupLite(TypedDict):
+    """ProcessGroupLite."""
+
+    id: str
+    display_name: str
+
+
+class ProcessGroupLitesWithCache(TypedDict):
+    """ProcessGroupLitesWithCache."""
+
+    cache: dict[str, "ProcessGroup"]
+    process_groups: list[ProcessGroupLite]
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py
index 3b7edd6ce..980fc9302 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py
@@ -27,6 +27,9 @@ class GroupModel(FlaskBpmnGroupModel):
     identifier = db.Column(db.String(255))
     user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
+    user_group_assignments_waiting = relationship(  # type: ignore
+        "UserGroupAssignmentWaitingModel", cascade="delete"
+    )
     users = relationship(  # type: ignore
         "UserModel",
         viewonly=True,
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py
index 940a51fc0..f74da5cca 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py
@@ -35,9 +35,9 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
         ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
     )
     lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id))
-    completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)
+    completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)  # type: ignore
 
-    actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
+    actual_owner_id: int = db.Column(ForeignKey(UserModel.id))  # type: ignore
     # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
     form_file_name: str | None = db.Column(db.String(50))
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py
index 7d98880fc..31823af82 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py
@@ -29,4 +29,4 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel):
     human_task_id = db.Column(
         ForeignKey(HumanTaskModel.id), nullable=False, index=True  # type: ignore
     )
-    user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
+    user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py
index 2559a6352..b0cc2aa34 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py
@@ -86,5 +86,6 @@ def ensure_failure_cause_is_set_if_message_instance_failed(
         if isinstance(instance, MessageInstanceModel):
             if instance.status == "failed" and instance.failure_cause is None:
                 raise ValueError(
-                    f"{instance.__class__.__name__}: failure_cause must be set if status is failed"
+                    f"{instance.__class__.__name__}: failure_cause must be set if"
+                    " status is failed"
                 )
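[Editor's note: the permissions files earlier in this change pair allowed_permissions with URI patterns where a trailing "*" acts as a prefix wildcard, and the Permission enum below is trimmed to exactly the create/read/update/delete verbs those files use. The real enforcement lives in the backend's authorization service, which this diff does not touch; the matcher below is only an illustration of that reading.]

```python
# Hypothetical wildcard matching for permission URIs like
# /process-instances/misc:test:* -- an assumption, not the project's code.
def uri_matches(permission_uri: str, requested_path: str) -> bool:
    """Return True if the requested path falls under the permission URI."""
    if permission_uri.endswith("*"):
        return requested_path.startswith(permission_uri[:-1])
    return requested_path == permission_uri


assert uri_matches("/process-instances/misc:test:*", "/process-instances/misc:test:some-model")
assert not uri_matches("/process-instances/misc:test:*", "/process-groups/misc:test:some-model")
```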
- list = "list" - - # maybe use create instead on - # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/* - # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/332/run - instantiate = "instantiate" # this is something you do to a process model - class PermissionAssignmentModel(SpiffworkflowBaseDBModel): """PermissionAssignmentModel.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/principal.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/principal.py index c7efa8609..ac8ee6a4e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/principal.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/principal.py @@ -27,7 +27,7 @@ class PrincipalModel(SpiffworkflowBaseDBModel): __table_args__ = (CheckConstraint("NOT(user_id IS NULL AND group_id IS NULL)"),) id = db.Column(db.Integer, primary_key=True) - user_id = db.Column(ForeignKey(UserModel.id), nullable=True, unique=True) + user_id = db.Column(ForeignKey(UserModel.id), nullable=True, unique=True) # type: ignore group_id = db.Column(ForeignKey(GroupModel.id), nullable=True, unique=True) user = relationship("UserModel", viewonly=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py index 1439b0459..63c851a5b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py @@ -11,6 +11,7 @@ import marshmallow from marshmallow import post_load from marshmallow import Schema +from spiffworkflow_backend.interfaces import ProcessGroupLite from spiffworkflow_backend.models.process_model import ProcessModelInfo @@ -29,7 +30,7 @@ class ProcessGroup: default_factory=list[ProcessModelInfo] ) process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"]) - parent_groups: list[dict] | None = None + parent_groups: list[ProcessGroupLite] | None = None def __post_init__(self) -> None: """__post_init__.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index f41897fd7..31912c306 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -57,13 +57,21 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): process_model_display_name: str = db.Column( db.String(255), nullable=False, index=True ) - process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) + process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore process_initiator = relationship("UserModel") + active_human_tasks = relationship( + "HumanTaskModel", + primaryjoin=( + "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id," + " HumanTaskModel.completed == False)" + ), + ) # type: ignore + human_tasks = relationship( "HumanTaskModel", cascade="delete", - primaryjoin="and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)", + overlaps="active_human_tasks", ) # type: ignore message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index 1f22a3830..b1288b3f1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -70,7 +70,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) identifier: str = db.Column(db.String(50), nullable=False, index=True) report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore - created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) + created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore created_by = relationship("UserModel") created_at_in_seconds = db.Column(db.Integer) updated_at_in_seconds = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py index e8d5eed1c..c737b274b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py @@ -11,6 +11,7 @@ import marshmallow from marshmallow import Schema from marshmallow.decorators import post_load +from spiffworkflow_backend.interfaces import ProcessGroupLite from spiffworkflow_backend.models.file import File @@ -37,7 +38,7 @@ class ProcessModelInfo: files: list[File] | None = field(default_factory=list[File]) fault_or_suspend_on_exception: str = NotificationType.fault.value exception_notification_addresses: list[str] = field(default_factory=list) - parent_groups: list[dict] | None = None + parent_groups: list[ProcessGroupLite] | None = None metadata_extraction_paths: list[dict[str, str]] | None = None def __post_init__(self) -> None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py index 92fd470a3..91a4f23bb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py @@ -17,7 +17,7 @@ class SecretModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) key: str = db.Column(db.String(50), unique=True, nullable=False) value: str = db.Column(db.Text(), nullable=False) - user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) + user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 60deda842..79814c1d5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -43,8 +43,8 @@ class Task: FIELD_TYPE_EMAIL = "email" # email: Email address FIELD_TYPE_URL = "url" # url: Website address - FIELD_PROP_AUTO_COMPLETE_MAX = ( - "autocomplete_num" # Not used directly, passed in from the front end. + FIELD_PROP_AUTO_COMPLETE_MAX = ( # Not used directly, passed in from the front end. 
+ "autocomplete_num" ) # Required field @@ -77,8 +77,8 @@ class Task: # File specific field properties FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code - FIELD_PROP_FILE_DATA = ( - "file_data" # to associate a bit of data with a specific file upload file. + FIELD_PROP_FILE_DATA = ( # to associate a bit of data with a specific file upload file. + "file_data" ) # Additional properties @@ -118,6 +118,7 @@ class Task: form_schema: Union[str, None] = None, form_ui_schema: Union[str, None] = None, parent: Optional[str] = None, + event_definition: Union[dict[str, Any], None] = None, call_activity_process_identifier: Optional[str] = None, ): """__init__.""" @@ -130,6 +131,7 @@ class Task: self.documentation = documentation self.lane = lane self.parent = parent + self.event_definition = event_definition self.call_activity_process_identifier = call_activity_process_identifier self.data = data @@ -189,6 +191,7 @@ class Task: "form_schema": self.form_schema, "form_ui_schema": self.form_ui_schema, "parent": self.parent, + "event_definition": self.event_definition, "call_activity_process_identifier": self.call_activity_process_identifier, } @@ -290,6 +293,7 @@ class TaskSchema(Schema): "process_instance_id", "form_schema", "form_ui_schema", + "event_definition", ] multi_instance_type = EnumField(MultiInstanceType) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index 5fa09896d..c4838aafa 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -1,41 +1,38 @@ """User.""" from __future__ import annotations -from typing import Any +from dataclasses import dataclass import jwt import marshmallow from flask import current_app -from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from marshmallow import Schema from sqlalchemy.orm import relationship -from sqlalchemy.orm import validates from spiffworkflow_backend.models.group import GroupModel -from spiffworkflow_backend.services.authentication_service import ( - AuthenticationProviderTypes, -) class UserNotFoundError(Exception): """UserNotFoundError.""" +@dataclass class UserModel(SpiffworkflowBaseDBModel): """UserModel.""" __tablename__ = "user" __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),) - id = db.Column(db.Integer, primary_key=True) - # server and service id must be unique, not username. 
- username = db.Column(db.String(255), nullable=False, unique=False) - uid = db.Column(db.String(50), unique=True) - service = db.Column(db.String(50), nullable=False, unique=False) + id: int = db.Column(db.Integer, primary_key=True) + username: str = db.Column(db.String(255), nullable=False, unique=True) + + service = db.Column( + db.String(255), nullable=False, unique=False + ) # not 'openid' -- google, aws service_id = db.Column(db.String(255), nullable=False, unique=False) - name = db.Column(db.String(255)) + display_name = db.Column(db.String(255)) email = db.Column(db.String(255)) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) @@ -49,21 +46,6 @@ class UserModel(SpiffworkflowBaseDBModel): ) principal = relationship("PrincipalModel", uselist=False) # type: ignore - @validates("service") - def validate_service(self, key: str, value: Any) -> str: - """Validate_service.""" - try: - ap_type = getattr(AuthenticationProviderTypes, value, None) - except Exception as e: - raise ValueError(f"invalid service type: {value}") from e - if ap_type is not None: - ap_value: str = ap_type.value - return ap_value - raise ApiError( - error_code="invalid_service", - message=f"Could not validate service with value: {value}", - ) - def encode_auth_token(self) -> str: """Generate the Auth Token. diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py index fa5b620c8..9c1567fb7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py @@ -17,7 +17,7 @@ class UserGroupAssignmentModel(SpiffworkflowBaseDBModel): ) id = db.Column(db.Integer, primary_key=True) - user_id = db.Column(ForeignKey(UserModel.id), nullable=False) + user_id = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) group = relationship("GroupModel", overlaps="groups,user_group_assignments,users") # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py new file mode 100644 index 000000000..ac2747c85 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py @@ -0,0 +1,34 @@ +"""UserGroupAssignmentWaiting.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + +from spiffworkflow_backend.models.group import GroupModel + + +class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel): + """A group assignment for a username that does not yet exist. + + We cache it here to be applied if the user later logs in to the system. 
+ """ + + MATCH_ALL_USERS = "*" + __tablename__ = "user_group_assignment_waiting" + __table_args__ = ( + db.UniqueConstraint( + "username", "group_id", name="user_group_assignment_staged_unique" + ), + ) + + id = db.Column(db.Integer, primary_key=True) + username = db.Column(db.String(255), nullable=False) + group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) + + group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore + + def is_match_all(self) -> bool: + """Is_match_all.""" + if self.username == self.MATCH_ALL_USERS: + return True + return False diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py index f1223ae0d..5cb0ae89b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py @@ -141,7 +141,7 @@ def process_model_save(process_model_id: str, file_name: str) -> Union[str, Resp @admin_blueprint.route("/process-models//run", methods=["GET"]) def process_model_run(process_model_id: str) -> Union[str, Response]: """Process_model_run.""" - user = UserService.create_user("internal", "Mr. Test", username="Mr. Test") + user = UserService.create_user("Mr. Test", "internal", "Mr. Test") process_instance = ( ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model_id, user diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/health_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/health_controller.py new file mode 100644 index 000000000..e98311101 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/health_controller.py @@ -0,0 +1,13 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json + +import flask.wrappers +from flask.wrappers import Response + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel + + +def status() -> flask.wrappers.Response: + """Status.""" + ProcessInstanceModel.query.filter().first() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py new file mode 100644 index 000000000..51290770f --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py @@ -0,0 +1,176 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +from typing import Any +from typing import Dict +from typing import Optional + +import flask.wrappers +from flask import g +from flask import jsonify +from flask import make_response +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError + +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.message_model import MessageModel +from spiffworkflow_backend.models.message_triggerable_process_model import ( + MessageTriggerableProcessModel, +) +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema +from 
spiffworkflow_backend.routes.process_api_blueprint import ( + _find_process_instance_by_id_or_raise, +) +from spiffworkflow_backend.services.message_service import MessageService + + +def message_instance_list( + process_instance_id: Optional[int] = None, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Message_instance_list.""" + # to make sure the process instance exists + message_instances_query = MessageInstanceModel.query + + if process_instance_id: + message_instances_query = message_instances_query.filter_by( + process_instance_id=process_instance_id + ) + + message_instances = ( + message_instances_query.order_by( + MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore + MessageInstanceModel.id.desc(), # type: ignore + ) + .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) + .join(ProcessInstanceModel) + .add_columns( + MessageModel.identifier.label("message_identifier"), + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + for message_instance in message_instances: + message_correlations: dict = {} + for ( + mcmi + ) in ( + message_instance.MessageInstanceModel.message_correlations_message_instances + ): + mc = MessageCorrelationModel.query.filter_by( + id=mcmi.message_correlation_id + ).all() + for m in mc: + if m.name not in message_correlations: + message_correlations[m.name] = {} + message_correlations[m.name][ + m.message_correlation_property.identifier + ] = m.value + message_instance.MessageInstanceModel.message_correlations = ( + message_correlations + ) + + response_json = { + "results": message_instances.items, + "pagination": { + "count": len(message_instances.items), + "total": message_instances.total, + "pages": message_instances.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +# body: { +# payload: dict, +# process_instance_id: Optional[int], +# } +def message_start( + message_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Message_start.""" + message_model = MessageModel.query.filter_by(identifier=message_identifier).first() + if message_model is None: + raise ( + ApiError( + error_code="unknown_message", + message=f"Could not find message with identifier: {message_identifier}", + status_code=404, + ) + ) + + if "payload" not in body: + raise ( + ApiError( + error_code="missing_payload", + message="Body is missing payload.", + status_code=400, + ) + ) + + process_instance = None + if "process_instance_id" in body: + # to make sure we have a valid process_instance_id + process_instance = _find_process_instance_by_id_or_raise( + body["process_instance_id"] + ) + + message_instance = MessageInstanceModel.query.filter_by( + process_instance_id=process_instance.id, + message_model_id=message_model.id, + message_type="receive", + status="ready", + ).first() + if message_instance is None: + raise ( + ApiError( + error_code="cannot_find_waiting_message", + message=( + "Could not find waiting message for identifier" + f" {message_identifier} and process instance" + f" {process_instance.id}" + ), + status_code=400, + ) + ) + MessageService.process_message_receive( + message_instance, message_model.name, body["payload"] + ) + + else: + message_triggerable_process_model = ( + MessageTriggerableProcessModel.query.filter_by( + message_model_id=message_model.id + ).first() + ) + + if message_triggerable_process_model is None: + raise ( + 
ApiError( + error_code="cannot_start_message", + message=( + "Cannot start message with identifier:" + f" {message_identifier}" + ), + status_code=400, + ) + ) + + process_instance = MessageService.process_message_triggerable_process_model( + message_triggerable_process_model, + message_model.name, + body["payload"], + g.user, + ) + + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py index f812ab034..f25100eed 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -111,6 +111,7 @@ def token() -> dict: "iat": time.time(), "exp": time.time() + 86400, # Expire after a day. "sub": user_name, + "email": user_details["email"], "preferred_username": user_details.get("preferred_username", user_name), }, client_secret, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 5c0fdb5c2..4a6cc1c42 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -1,141 +1,54 @@ """APIs for dealing with process groups, process models, and process instances.""" import json -import os -import random -import string -import uuid from typing import Any from typing import Dict -from typing import Optional -from typing import TypedDict -from typing import Union -import connexion # type: ignore import flask.wrappers -import jinja2 -import werkzeug from flask import Blueprint from flask import current_app from flask import g from flask import jsonify from flask import make_response -from flask import redirect from flask import request from flask.wrappers import Response from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db -from lxml import etree # type: ignore -from lxml.builder import ElementMaker # type: ignore -from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from SpiffWorkflow.task import TaskState -from sqlalchemy import and_ -from sqlalchemy import asc -from sqlalchemy import desc -from sqlalchemy import or_ from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from spiffworkflow_backend.models.file import FileSchema -from spiffworkflow_backend.models.group import GroupModel -from spiffworkflow_backend.models.human_task import HumanTaskModel -from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel -from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel -from spiffworkflow_backend.models.message_instance import MessageInstanceModel -from spiffworkflow_backend.models.message_model import MessageModel -from spiffworkflow_backend.models.message_triggerable_process_model import ( - MessageTriggerableProcessModel, -) from spiffworkflow_backend.models.principal import PrincipalModel -from spiffworkflow_backend.models.process_group import ProcessGroup -from spiffworkflow_backend.models.process_group import ProcessGroupSchema -from spiffworkflow_backend.models.process_instance import 
ProcessInstanceApiSchema -from spiffworkflow_backend.models.process_instance import ( - ProcessInstanceCannotBeDeletedError, -) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.process_instance import ( ProcessInstanceTaskDataCannotBeUpdatedError, ) -from spiffworkflow_backend.models.process_instance_metadata import ( - ProcessInstanceMetadataModel, -) -from spiffworkflow_backend.models.process_instance_report import ( - ProcessInstanceReportModel, -) from spiffworkflow_backend.models.process_model import ProcessModelInfo -from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema -from spiffworkflow_backend.models.secret_model import SecretModel -from spiffworkflow_backend.models.secret_model import SecretModelSchema from spiffworkflow_backend.models.spec_reference import SpecReferenceCache -from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.services.authorization_service import AuthorizationService -from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService -from spiffworkflow_backend.services.file_system_service import FileSystemService from spiffworkflow_backend.services.git_service import GitService -from spiffworkflow_backend.services.message_service import MessageService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportFilter, -) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportService, -) -from spiffworkflow_backend.services.process_instance_service import ( - ProcessInstanceService, -) from spiffworkflow_backend.services.process_model_service import ProcessModelService -from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner -from spiffworkflow_backend.services.secret_service import SecretService -from spiffworkflow_backend.services.service_task_service import ServiceTaskService -from spiffworkflow_backend.services.spec_file_service import SpecFileService -from spiffworkflow_backend.services.user_service import UserService - - -class TaskDataSelectOption(TypedDict): - """TaskDataSelectOption.""" - - value: str - label: str - - -class ReactJsonSchemaSelectOption(TypedDict): - """ReactJsonSchemaSelectOption.""" - - type: str - title: str - enum: list[str] process_api_blueprint = Blueprint("process_api", __name__) -def status() -> flask.wrappers.Response: - """Status.""" - ProcessInstanceModel.query.filter().first() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.Response: """Permissions_check.""" if "requests_to_check" not in body: raise ( ApiError( error_code="could_not_requests_to_check", - message="The key 'requests_to_check' not found at root of request 
body.", + message=( + "The key 'requests_to_check' not found at root of request body." + ), status_code=400, ) ) - response_dict: dict[str, dict[str, bool]] = {} requests_to_check = body["requests_to_check"] @@ -158,295 +71,6 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R return make_response(jsonify({"results": response_dict}), 200) -def modify_process_model_id(process_model_id: str) -> str: - """Modify_process_model_id.""" - return process_model_id.replace("/", ":") - - -def un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str: - """Un_modify_modified_process_model_id.""" - return modified_process_model_identifier.replace(":", "/") - - -def process_group_add(body: dict) -> flask.wrappers.Response: - """Add_process_group.""" - process_group = ProcessGroup(**body) - ProcessModelService.add_process_group(process_group) - _commit_and_push_to_git( - f"User: {g.user.username} added process group {process_group.id}" - ) - return make_response(jsonify(process_group), 201) - - -def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response: - """Process_group_delete.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - ProcessModelService().process_group_delete(process_group_id) - _commit_and_push_to_git( - f"User: {g.user.username} deleted process group {process_group_id}" - ) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_group_update( - modified_process_group_id: str, body: dict -) -> flask.wrappers.Response: - """Process Group Update.""" - body_include_list = ["display_name", "description"] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - process_group = ProcessGroup(id=process_group_id, **body_filtered) - ProcessModelService.update_process_group(process_group) - _commit_and_push_to_git( - f"User: {g.user.username} updated process group {process_group_id}" - ) - return make_response(jsonify(process_group), 200) - - -def process_group_list( - process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_group_list.""" - if process_group_identifier is not None: - process_groups = ProcessModelService.get_process_groups( - process_group_identifier - ) - else: - process_groups = ProcessModelService.get_process_groups() - batch = ProcessModelService().get_batch( - items=process_groups, page=page, per_page=per_page - ) - pages = len(process_groups) // per_page - remainder = len(process_groups) % per_page - if remainder > 0: - pages += 1 - - response_json = { - "results": ProcessGroupSchema(many=True).dump(batch), - "pagination": { - "count": len(batch), - "total": len(process_groups), - "pages": pages, - }, - } - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def process_group_show( - modified_process_group_id: str, -) -> Any: - """Process_group_show.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - try: - process_group = ProcessModelService.get_process_group(process_group_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_group_cannot_be_found", - message=f"Process group cannot be found: {process_group_id}", - status_code=400, - ) - ) from exception - - 
process_group.parent_groups = ProcessModelService.get_parent_group_array( - process_group.id - ) - return make_response(jsonify(process_group), 200) - - -def process_group_move( - modified_process_group_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_group_move.""" - original_process_group_id = un_modify_modified_process_model_id( - modified_process_group_identifier - ) - new_process_group = ProcessModelService().process_group_move( - original_process_group_id, new_location - ) - _commit_and_push_to_git( - f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}" - ) - return make_response(jsonify(new_process_group), 200) - - -def process_model_create( - modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Process_model_create.""" - body_include_list = [ - "id", - "display_name", - "primary_file_name", - "primary_process_id", - "description", - "metadata_extraction_paths", - ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - if modified_process_group_id is None: - raise ApiError( - error_code="process_group_id_not_specified", - message="Process Model could not be created when process_group_id path param is unspecified", - status_code=400, - ) - - unmodified_process_group_id = un_modify_modified_process_model_id( - modified_process_group_id - ) - process_group = ProcessModelService.get_process_group(unmodified_process_group_id) - if process_group is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body because Process Group could not be found: {body}", - status_code=400, - ) - - process_model_info = ProcessModelInfo(**body_filtered) # type: ignore - if process_model_info is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body: {body}", - status_code=400, - ) - - ProcessModelService.add_process_model(process_model_info) - _commit_and_push_to_git( - f"User: {g.user.username} created process model {process_model_info.id}" - ) - return Response( - json.dumps(ProcessModelInfoSchema().dump(process_model_info)), - status=201, - mimetype="application/json", - ) - - -def process_model_delete( - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_model_delete.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - ProcessModelService().process_model_delete(process_model_identifier) - _commit_and_push_to_git( - f"User: {g.user.username} deleted process model {process_model_identifier}" - ) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_update( - modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] -) -> Any: - """Process_model_update.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - body_include_list = [ - "display_name", - "primary_file_name", - "primary_process_id", - "description", - "metadata_extraction_paths", - ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - process_model = get_process_model(process_model_identifier) - ProcessModelService.update_process_model(process_model, body_filtered) - _commit_and_push_to_git( - f"User: {g.user.username} updated process 
model {process_model_identifier}" - ) - return ProcessModelInfoSchema().dump(process_model) - - -def process_model_show(modified_process_model_identifier: str) -> Any: - """Process_model_show.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - files = sorted( - SpecFileService.get_files(process_model), - key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index, - ) - process_model.files = files - for file in process_model.files: - file.references = SpecFileService.get_references_for_file(file, process_model) - - process_model.parent_groups = ProcessModelService.get_parent_group_array( - process_model.id - ) - return make_response(jsonify(process_model), 200) - - -def process_model_move( - modified_process_model_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_model_move.""" - original_process_model_id = un_modify_modified_process_model_id( - modified_process_model_identifier - ) - new_process_model = ProcessModelService().process_model_move( - original_process_model_id, new_location - ) - _commit_and_push_to_git( - f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}" - ) - return make_response(jsonify(new_process_model), 200) - - -def process_model_publish( - modified_process_model_identifier: str, branch_to_update: Optional[str] = None -) -> flask.wrappers.Response: - """Process_model_publish.""" - if branch_to_update is None: - branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] - process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_identifier - ) - pr_url = GitService().publish(process_model_identifier, branch_to_update) - data = {"ok": True, "pr_url": pr_url} - return Response(json.dumps(data), status=200, mimetype="application/json") - - -def process_model_list( - process_group_identifier: Optional[str] = None, - recursive: Optional[bool] = False, - filter_runnable_by_user: Optional[bool] = False, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process model list!""" - process_models = ProcessModelService.get_process_models( - process_group_id=process_group_identifier, - recursive=recursive, - filter_runnable_by_user=filter_runnable_by_user, - ) - batch = ProcessModelService().get_batch( - process_models, page=page, per_page=per_page - ) - pages = len(process_models) // per_page - remainder = len(process_models) % per_page - if remainder > 0: - pages += 1 - response_json = { - "results": ProcessModelInfoSchema(many=True).dump(batch), - "pagination": { - "count": len(batch), - "total": len(process_models), - "pages": pages, - }, - } - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - def process_list() -> Any: """Returns a list of all known processes. 
@@ -457,1405 +81,28 @@ def process_list() -> Any: return SpecReferenceSchema(many=True).dump(references) -def get_file(modified_process_model_identifier: str, file_name: str) -> Any: - """Get_file.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - files = SpecFileService.get_files(process_model, file_name) - if len(files) == 0: - raise ApiError( - error_code="unknown file", - message=f"No information exists for file {file_name}" - f" it does not exist in workflow {process_model_identifier}.", - status_code=404, - ) - - file = files[0] - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id - # file.process_group_id = process_model.process_group_id - return FileSchema().dump(file) - - -def process_model_file_update( - modified_process_model_identifier: str, file_name: str -) -> flask.wrappers.Response: - """Process_model_file_update.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - - request_file = get_file_from_request() - request_file_contents = request_file.stream.read() - if not request_file_contents: - raise ApiError( - error_code="file_contents_empty", - message="Given request file does not have any content", - status_code=400, - ) - - SpecFileService.update_file(process_model, file_name, request_file_contents) - _commit_and_push_to_git( - f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" - ) - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_file_delete( - modified_process_model_identifier: str, file_name: str -) -> flask.wrappers.Response: - """Process_model_file_delete.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - try: - SpecFileService.delete_file(process_model, file_name) - except FileNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_file_cannot_be_found", - message=f"Process model file cannot be found: {file_name}", - status_code=400, - ) - ) from exception - - _commit_and_push_to_git( - f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}" - ) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response: - """Add_file.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - request_file = get_file_from_request() - if not request_file.filename: - raise ApiError( - error_code="could_not_get_filename", - message="Could not get filename from request", - status_code=400, - ) - - file = SpecFileService.add_file( - process_model, request_file.filename, request_file.stream.read() - ) - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id - _commit_and_push_to_git( - f"User: {g.user.username} added process model file {process_model_identifier}/{file.name}" - ) - return Response( - json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" - ) - - -def process_instance_create( - modified_process_model_identifier: str, -) -> 
flask.wrappers.Response: - """Create_process_instance.""" - process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_identifier - ) - process_instance = ( - ProcessInstanceService.create_process_instance_from_process_model_identifier( - process_model_identifier, g.user - ) - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=201, - mimetype="application/json", - ) - - -def process_instance_run( - modified_process_model_identifier: str, +def process_data_show( process_instance_id: int, - do_engine_steps: bool = True, + process_data_identifier: str, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - if process_instance.status != "not_started": - raise ApiError( - error_code="process_instance_not_runnable", - message=f"Process Instance ({process_instance.id}) is currently running or has already run.", - status_code=400, - ) - + """Process_data_show.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) processor = ProcessInstanceProcessor(process_instance) + all_process_data = processor.get_data() + process_data_value = None + if process_data_identifier in all_process_data: + process_data_value = all_process_data[process_data_identifier] - if do_engine_steps: - try: - processor.do_engine_steps() - except ApiError as e: - ErrorHandlingService().handle_error(processor, e) - raise e - except Exception as e: - ErrorHandlingService().handle_error(processor, e) - task = processor.bpmn_process_instance.last_task - raise ApiError.from_task( - error_code="unknown_exception", - message=f"An unknown error occurred. 
Original error: {e}", - status_code=400, - task=task, - ) from e - processor.save() - - if not current_app.config["RUN_BACKGROUND_SCHEDULER"]: - MessageService.process_message_instances() - - process_instance_api = ProcessInstanceService.processor_to_process_instance_api( - processor + return make_response( + jsonify( + { + "process_data_identifier": process_data_identifier, + "process_data_value": process_data_value, + } + ), + 200, ) - process_instance_data = processor.get_data() - process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) - process_instance_metadata["data"] = process_instance_data - return Response( - json.dumps(process_instance_metadata), status=200, mimetype="application/json" - ) - - -def process_instance_terminate( - process_instance_id: int, - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.terminate() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_suspend( - process_instance_id: int, - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_suspend.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.suspend() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_resume( - process_instance_id: int, - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_resume.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.resume() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_log_list( - modified_process_model_identifier: str, - process_instance_id: int, - page: int = 1, - per_page: int = 100, - detailed: bool = False, -) -> flask.wrappers.Response: - """Process_instance_log_list.""" - # to make sure the process instance exists - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - log_query = SpiffLoggingModel.query.filter( - SpiffLoggingModel.process_instance_id == process_instance.id - ) - if not detailed: - log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore - - logs = ( - log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore - .join( - UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True - ) # isouter since if we don't have a user, we still want the log - .add_columns( - UserModel.username, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - response_json = { - "results": logs.items, - "pagination": { - "count": len(logs.items), - "total": logs.total, - "pages": logs.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def message_instance_list( - process_instance_id: Optional[int] = None, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Message_instance_list.""" - # to make sure the process instance exists - message_instances_query = MessageInstanceModel.query - - if process_instance_id: - message_instances_query = message_instances_query.filter_by( - 
process_instance_id=process_instance_id - ) - - message_instances = ( - message_instances_query.order_by( - MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore - MessageInstanceModel.id.desc(), # type: ignore - ) - .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) - .join(ProcessInstanceModel) - .add_columns( - MessageModel.identifier.label("message_identifier"), - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.process_model_display_name, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - for message_instance in message_instances: - message_correlations: dict = {} - for ( - mcmi - ) in ( - message_instance.MessageInstanceModel.message_correlations_message_instances - ): - mc = MessageCorrelationModel.query.filter_by( - id=mcmi.message_correlation_id - ).all() - for m in mc: - if m.name not in message_correlations: - message_correlations[m.name] = {} - message_correlations[m.name][ - m.message_correlation_property.identifier - ] = m.value - message_instance.MessageInstanceModel.message_correlations = ( - message_correlations - ) - - response_json = { - "results": message_instances.items, - "pagination": { - "count": len(message_instances.items), - "total": message_instances.total, - "pages": message_instances.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -# body: { -# payload: dict, -# process_instance_id: Optional[int], -# } -def message_start( - message_identifier: str, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Message_start.""" - message_model = MessageModel.query.filter_by(identifier=message_identifier).first() - if message_model is None: - raise ( - ApiError( - error_code="unknown_message", - message=f"Could not find message with identifier: {message_identifier}", - status_code=404, - ) - ) - - if "payload" not in body: - raise ( - ApiError( - error_code="missing_payload", - message="Body is missing payload.", - status_code=400, - ) - ) - - process_instance = None - if "process_instance_id" in body: - # to make sure we have a valid process_instance_id - process_instance = find_process_instance_by_id_or_raise( - body["process_instance_id"] - ) - - message_instance = MessageInstanceModel.query.filter_by( - process_instance_id=process_instance.id, - message_model_id=message_model.id, - message_type="receive", - status="ready", - ).first() - if message_instance is None: - raise ( - ApiError( - error_code="cannot_find_waiting_message", - message=f"Could not find waiting message for identifier {message_identifier} " - f"and process instance {process_instance.id}", - status_code=400, - ) - ) - MessageService.process_message_receive( - message_instance, message_model.name, body["payload"] - ) - - else: - message_triggerable_process_model = ( - MessageTriggerableProcessModel.query.filter_by( - message_model_id=message_model.id - ).first() - ) - - if message_triggerable_process_model is None: - raise ( - ApiError( - error_code="cannot_start_message", - message=f"Message with identifier cannot be start with message: {message_identifier}", - status_code=400, - ) - ) - - process_instance = MessageService.process_message_triggerable_process_model( - message_triggerable_process_model, - message_model.name, - body["payload"], - g.user, - ) - - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - -def process_instance_list_for_me( - process_model_identifier: Optional[str] = None, - page: int 
= 1, - per_page: int = 100, - start_from: Optional[int] = None, - start_to: Optional[int] = None, - end_from: Optional[int] = None, - end_to: Optional[int] = None, - process_status: Optional[str] = None, - user_filter: Optional[bool] = False, - report_identifier: Optional[str] = None, - report_id: Optional[int] = None, - user_group_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """Process_instance_list_for_me.""" - return process_instance_list( - process_model_identifier=process_model_identifier, - page=page, - per_page=per_page, - start_from=start_from, - start_to=start_to, - end_from=end_from, - end_to=end_to, - process_status=process_status, - user_filter=user_filter, - report_identifier=report_identifier, - report_id=report_id, - user_group_identifier=user_group_identifier, - with_relation_to_me=True, - ) - - -def process_instance_list( - process_model_identifier: Optional[str] = None, - page: int = 1, - per_page: int = 100, - start_from: Optional[int] = None, - start_to: Optional[int] = None, - end_from: Optional[int] = None, - end_to: Optional[int] = None, - process_status: Optional[str] = None, - with_relation_to_me: Optional[bool] = None, - user_filter: Optional[bool] = False, - report_identifier: Optional[str] = None, - report_id: Optional[int] = None, - user_group_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """Process_instance_list.""" - process_instance_report = ProcessInstanceReportService.report_with_identifier( - g.user, report_id, report_identifier - ) - - if user_filter: - report_filter = ProcessInstanceReportFilter( - process_model_identifier=process_model_identifier, - user_group_identifier=user_group_identifier, - start_from=start_from, - start_to=start_to, - end_from=end_from, - end_to=end_to, - with_relation_to_me=with_relation_to_me, - process_status=process_status.split(",") if process_status else None, - ) - else: - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model_identifier, - user_group_identifier=user_group_identifier, - start_from=start_from, - start_to=start_to, - end_from=end_from, - end_to=end_to, - process_status=process_status, - with_relation_to_me=with_relation_to_me, - ) - ) - - response_json = ProcessInstanceReportService.run_process_instance_report( - report_filter=report_filter, - process_instance_report=process_instance_report, - page=page, - per_page=per_page, - user=g.user, - ) - - return make_response(jsonify(response_json), 200) - - -def process_instance_report_column_list() -> flask.wrappers.Response: - """Process_instance_report_column_list.""" - table_columns = ProcessInstanceReportService.builtin_column_options() - columns_for_metadata = ( - db.session.query(ProcessInstanceMetadataModel.key) - .order_by(ProcessInstanceMetadataModel.key) - .distinct() # type: ignore - .all() - ) - columns_for_metadata_strings = [ - {"Header": i[0], "accessor": i[0], "filterable": True} - for i in columns_for_metadata - ] - return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) - - -def process_instance_show_for_me( - modified_process_model_identifier: str, - process_instance_id: int, - process_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """Process_instance_show_for_me.""" - process_instance = _find_process_instance_for_me_or_raise(process_instance_id) - return _get_process_instance( - process_instance=process_instance, - 
modified_process_model_identifier=modified_process_model_identifier, - process_identifier=process_identifier, - ) - - -def process_instance_show( - modified_process_model_identifier: str, - process_instance_id: int, - process_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """Create_process_instance.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - return _get_process_instance( - process_instance=process_instance, - modified_process_model_identifier=modified_process_model_identifier, - process_identifier=process_identifier, - ) - - -def _get_process_instance( - modified_process_model_identifier: str, - process_instance: ProcessInstanceModel, - process_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """_get_process_instance.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - current_version_control_revision = GitService.get_current_revision() - - process_model_with_diagram = None - name_of_file_with_diagram = None - if process_identifier: - spec_reference = SpecReferenceCache.query.filter_by( - identifier=process_identifier - ).first() - if spec_reference is None: - raise SpecReferenceNotFoundError( - f"Could not find given process identifier in the cache: {process_identifier}" - ) - - process_model_with_diagram = ProcessModelService.get_process_model( - spec_reference.process_model_id - ) - name_of_file_with_diagram = spec_reference.file_name - else: - process_model_with_diagram = get_process_model(process_model_identifier) - if process_model_with_diagram.primary_file_name: - name_of_file_with_diagram = process_model_with_diagram.primary_file_name - - if process_model_with_diagram and name_of_file_with_diagram: - if ( - process_instance.bpmn_version_control_identifier - == current_version_control_revision - ): - bpmn_xml_file_contents = SpecFileService.get_data( - process_model_with_diagram, name_of_file_with_diagram - ).decode("utf-8") - else: - bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( - process_model_with_diagram, - process_instance.bpmn_version_control_identifier, - file_name=name_of_file_with_diagram, - ) - process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents - - return make_response(jsonify(process_instance), 200) - - -def process_instance_delete( - process_instance_id: int, modified_process_model_identifier: str -) -> flask.wrappers.Response: - """Create_process_instance.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if not process_instance.has_terminal_status(): - raise ProcessInstanceCannotBeDeletedError( - f"Process instance ({process_instance.id}) cannot be deleted since it does not have a terminal status. " - f"Current status is {process_instance.status}." 
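_get_process_instance above decides where to read the BPMN diagram from: straight off disk when the instance was started at the current git revision, otherwise from git at the revision pinned on the instance. A condensed sketch of that branch, using the same service calls the function makes (imports assumed to match the backend's own modules):

from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.spec_file_service import SpecFileService


def diagram_xml_for_instance(process_instance, process_model, file_name: str) -> str:
    """Read the diagram as it looked when the instance was started."""
    current_revision = GitService.get_current_revision()
    if process_instance.bpmn_version_control_identifier == current_revision:
        # instance matches the working tree; a plain file read is enough
        return SpecFileService.get_data(process_model, file_name).decode("utf-8")
    # otherwise ask git for the file contents at the pinned revision
    return GitService.get_instance_file_contents_for_revision(
        process_model,
        process_instance.bpmn_version_control_identifier,
        file_name=file_name,
    )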
- ) - - # (Pdb) db.session.delete - # > - db.session.query(SpiffLoggingModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.query(SpiffStepDetailsModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.delete(process_instance) - db.session.commit() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_list( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_instance_report_list.""" - process_instance_reports = ProcessInstanceReportModel.query.filter_by( - created_by_id=g.user.id, - ).all() - - return make_response(jsonify(process_instance_reports), 200) - - -def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.create_report( - identifier=body["identifier"], - user=g.user, - report_metadata=body["report_metadata"], - ) - - return make_response(jsonify(process_instance_report), 201) - - -def process_instance_report_update( - report_id: int, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - id=report_id, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - process_instance_report.report_metadata = body["report_metadata"] - db.session.commit() - - return make_response(jsonify(process_instance_report), 201) - - -def process_instance_report_delete( - report_id: int, -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - id=report_id, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - db.session.delete(process_instance_report) - db.session.commit() - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def service_task_list() -> flask.wrappers.Response: - """Service_task_list.""" - available_connectors = ServiceTaskService.available_connectors() - return Response( - json.dumps(available_connectors), status=200, mimetype="application/json" - ) - - -def authentication_list() -> flask.wrappers.Response: - """Authentication_list.""" - available_authentications = ServiceTaskService.authentication_list() - response_json = { - "results": available_authentications, - "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"], - "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", - } - - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def authentication_callback( - service: str, - auth_method: str, -) -> werkzeug.wrappers.Response: - """Authentication_callback.""" - verify_token(request.args.get("token"), force_run=True) - response = request.args["response"] - SecretService().update_secret( - f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True - ) - return redirect( - f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" - ) - - -def process_instance_report_show( - report_id: int, - page: int = 1, - per_page: int = 100, -) -> 
flask.wrappers.Response: - """Process_instance_report_show.""" - process_instances = ProcessInstanceModel.query.order_by( - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ).paginate(page=page, per_page=per_page, error_out=False) - - process_instance_report = ProcessInstanceReportModel.query.filter_by( - id=report_id, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - substitution_variables = request.args.to_dict() - result_dict = process_instance_report.generate_report( - process_instances.items, substitution_variables - ) - - # update this if we go back to a database query instead of filtering in memory - result_dict["pagination"] = { - "count": len(result_dict["results"]), - "total": len(result_dict["results"]), - "pages": 1, - } - - return Response(json.dumps(result_dict), status=200, mimetype="application/json") - - -# TODO: see comment for before_request -# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"]) -def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_my_tasks.""" - principal = find_principal_or_raise() - human_tasks = ( - HumanTaskModel.query.order_by(desc(HumanTaskModel.id)) # type: ignore - .join(ProcessInstanceModel) - .join(HumanTaskUserModel) - .filter_by(user_id=principal.user_id) - .filter(HumanTaskModel.completed == False) # noqa: E712 - # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. - .add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.process_model_display_name, - ProcessInstanceModel.status, - HumanTaskModel.task_name, - HumanTaskModel.task_title, - HumanTaskModel.task_type, - HumanTaskModel.task_status, - HumanTaskModel.task_id, - HumanTaskModel.id, - HumanTaskModel.process_model_display_name, - HumanTaskModel.process_instance_id, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items] - - response_json = { - "results": tasks, - "pagination": { - "count": len(human_tasks.items), - "total": human_tasks.total, - "pages": human_tasks.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def task_list_for_my_open_processes( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_my_open_processes.""" - return get_tasks(page=page, per_page=per_page) - - -def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_for_me.""" - return get_tasks( - processes_started_by_user=False, - has_lane_assignment_id=False, - page=page, - per_page=per_page, - ) - - -def task_list_for_my_groups( - user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_my_groups.""" - return get_tasks( - user_group_identifier=user_group_identifier, - processes_started_by_user=False, - page=page, - per_page=per_page, - ) - - -def user_group_list_for_current_user() -> flask.wrappers.Response: - """User_group_list_for_current_user.""" - groups = g.user.groups - # TODO: filter out the default group and have a way to know what is the default group - group_identifiers = [i.identifier for i in groups if i.identifier != "everybody"] - return make_response(jsonify(sorted(group_identifiers)), 
200) - - -def get_tasks( - processes_started_by_user: bool = True, - has_lane_assignment_id: bool = True, - page: int = 1, - per_page: int = 100, - user_group_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """Get_tasks.""" - user_id = g.user.id - - # use distinct to ensure we only get one row per human task otherwise - # we can get back multiple for the same human task row which throws off - # pagination later on - # https://stackoverflow.com/q/34582014/6090676 - human_tasks_query = ( - HumanTaskModel.query.distinct() - .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id) - .join(ProcessInstanceModel) - .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - .filter(HumanTaskModel.completed == False) # noqa: E712 - ) - - if processes_started_by_user: - human_tasks_query = human_tasks_query.filter( - ProcessInstanceModel.process_initiator_id == user_id - ).outerjoin( - HumanTaskUserModel, - and_( - HumanTaskUserModel.user_id == user_id, - HumanTaskModel.id == HumanTaskUserModel.human_task_id, - ), - ) - else: - human_tasks_query = human_tasks_query.filter( - ProcessInstanceModel.process_initiator_id != user_id - ).join( - HumanTaskUserModel, - and_( - HumanTaskUserModel.user_id == user_id, - HumanTaskModel.id == HumanTaskUserModel.human_task_id, - ), - ) - if has_lane_assignment_id: - if user_group_identifier: - human_tasks_query = human_tasks_query.filter( - GroupModel.identifier == user_group_identifier - ) - else: - human_tasks_query = human_tasks_query.filter( - HumanTaskModel.lane_assignment_id.is_not(None) # type: ignore - ) - else: - human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore - - human_tasks = ( - human_tasks_query.add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.status.label("process_instance_status"), # type: ignore - ProcessInstanceModel.updated_at_in_seconds, - ProcessInstanceModel.created_at_in_seconds, - UserModel.username, - GroupModel.identifier.label("user_group_identifier"), - HumanTaskModel.task_name, - HumanTaskModel.task_title, - HumanTaskModel.process_model_display_name, - HumanTaskModel.process_instance_id, - HumanTaskUserModel.user_id.label("current_user_is_potential_owner"), - ) - .order_by(desc(HumanTaskModel.id)) # type: ignore - .paginate(page=page, per_page=per_page, error_out=False) - ) - - response_json = { - "results": human_tasks.items, - "pagination": { - "count": len(human_tasks.items), - "total": human_tasks.total, - "pages": human_tasks.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def process_instance_task_list_without_task_data_for_me( - modified_process_model_identifier: str, - process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, -) -> flask.wrappers.Response: - """Process_instance_task_list_without_task_data_for_me.""" - process_instance = _find_process_instance_for_me_or_raise(process_instance_id) - print(f"process_instance: {process_instance}") - return process_instance_task_list( - modified_process_model_identifier, - process_instance, - all_tasks, - spiff_step, - get_task_data=False, - ) - - -def process_instance_task_list_without_task_data( - modified_process_model_identifier: str, - process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, -) -> flask.wrappers.Response: - """Process_instance_task_list_without_task_data.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - return 
process_instance_task_list( - modified_process_model_identifier, - process_instance, - all_tasks, - spiff_step, - get_task_data=False, - ) - - -def process_instance_task_list_with_task_data( - modified_process_model_identifier: str, - process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, -) -> flask.wrappers.Response: - """Process_instance_task_list_with_task_data.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - return process_instance_task_list( - modified_process_model_identifier, - process_instance, - all_tasks, - spiff_step, - get_task_data=True, - ) - - -def process_instance_task_list( - _modified_process_model_identifier: str, - process_instance: ProcessInstanceModel, - all_tasks: bool = False, - spiff_step: int = 0, - get_task_data: bool = False, -) -> flask.wrappers.Response: - """Process_instance_task_list.""" - if spiff_step > 0: - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance.id == process_instance.id, - SpiffStepDetailsModel.spiff_step == spiff_step, - ) - .first() - ) - if step_detail is not None and process_instance.bpmn_json is not None: - bpmn_json = json.loads(process_instance.bpmn_json) - bpmn_json["tasks"] = step_detail.task_json["tasks"] - bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"] - process_instance.bpmn_json = json.dumps(bpmn_json) - - processor = ProcessInstanceProcessor(process_instance) - - spiff_tasks = None - if all_tasks: - spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - else: - spiff_tasks = processor.get_all_user_tasks() - - tasks = [] - for spiff_task in spiff_tasks: - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - if get_task_data: - task.data = spiff_task.data - tasks.append(task) - - return make_response(jsonify(tasks), 200) - - -def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: - """Task_show.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if process_instance.status == ProcessInstanceStatus.suspended.value: - raise ApiError( - error_code="error_suspended", - message="The process instance is suspended", - status_code=400, - ) - - process_model = get_process_model( - process_instance.process_model_identifier, - ) - - form_schema_file_name = "" - form_ui_schema_file_name = "" - spiff_task = get_spiff_task_from_process_instance(task_id, process_instance) - extensions = spiff_task.task_spec.extensions - - if "properties" in extensions: - properties = extensions["properties"] - if "formJsonSchemaFilename" in properties: - form_schema_file_name = properties["formJsonSchemaFilename"] - if "formUiSchemaFilename" in properties: - form_ui_schema_file_name = properties["formUiSchemaFilename"] - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data - task.process_model_display_name = process_model.display_name - task.process_model_identifier = process_model.id - - process_model_with_form = process_model - refs = SpecFileService.get_references_for_process(process_model_with_form) - all_processes = [i.identifier for i in refs] - if task.process_identifier not in all_processes: - bpmn_file_full_path = ( - ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier( - task.process_identifier - ) - ) - relative_path = os.path.relpath( - bpmn_file_full_path, start=FileSystemService.root_path() - ) - process_model_relative_path = os.path.dirname(relative_path) 
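# For illustration only -- not part of this changeset: the relpath/dirname pair above reduces an absolute .bpmn file path to the process-model directory relative to the spec root. A standalone sketch with invented example paths (the real root comes from FileSystemService.root_path()):
import os

root_path = "/var/bpmn-specs"  # stand-in for FileSystemService.root_path()
bpmn_file_full_path = "/var/bpmn-specs/finance/invoice/invoice.bpmn"
relative_path = os.path.relpath(bpmn_file_full_path, start=root_path)
process_model_relative_path = os.path.dirname(relative_path)
assert relative_path == "finance/invoice/invoice.bpmn"
assert process_model_relative_path == "finance/invoice"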
- process_model_with_form = ( - ProcessModelService.get_process_model_from_relative_path( - process_model_relative_path - ) - ) - - if task.type == "User Task": - if not form_schema_file_name: - raise ( - ApiError( - error_code="missing_form_file", - message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}", - status_code=400, - ) - ) - - form_contents = prepare_form_data( - form_schema_file_name, - task.data, - process_model_with_form, - ) - - try: - # form_contents is a str - form_dict = json.loads(form_contents) - except Exception as exception: - raise ( - ApiError( - error_code="error_loading_form", - message=f"Could not load form schema from: {form_schema_file_name}. Error was: {str(exception)}", - status_code=400, - ) - ) from exception - - if task.data: - _update_form_schema_with_task_data_as_needed(form_dict, task.data) - - if form_contents: - task.form_schema = form_dict - - if form_ui_schema_file_name: - ui_form_contents = prepare_form_data( - form_ui_schema_file_name, - task.data, - process_model_with_form, - ) - if ui_form_contents: - task.form_ui_schema = ui_form_contents - - if task.properties and task.data and "instructionsForEndUser" in task.properties: - if task.properties["instructionsForEndUser"]: - task.properties["instructionsForEndUser"] = render_jinja_template( - task.properties["instructionsForEndUser"], task.data - ) - return make_response(jsonify(task), 200) - - -def task_submit( - process_instance_id: int, - task_id: str, - body: Dict[str, Any], - terminate_loop: bool = False, -) -> flask.wrappers.Response: - """Task_submit_user_data.""" - principal = find_principal_or_raise() - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - if not process_instance.can_submit_task(): - raise ApiError( - error_code="process_instance_not_runnable", - message=f"Process Instance ({process_instance.id}) has status " - f"{process_instance.status} which does not allow tasks to be submitted.", - status_code=400, - ) - - processor = ProcessInstanceProcessor(process_instance) - spiff_task = get_spiff_task_from_process_instance( - task_id, process_instance, processor=processor - ) - AuthorizationService.assert_user_can_complete_spiff_task( - process_instance.id, spiff_task, principal.user - ) - - if spiff_task.state != TaskState.READY: - raise ( - ApiError( - error_code="invalid_state", - message="You may not update a task unless it is in the READY state.", - status_code=400, - ) - ) - - if terminate_loop and spiff_task.is_looping(): - spiff_task.terminate_loop() - - human_task = HumanTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id, completed=False - ).first() - if human_task is None: - raise ( - ApiError( - error_code="no_human_task", - message="Cannot find an human task with task id '{task_id}' for process instance {process_instance_id}.", - status_code=500, - ) - ) - - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - human_task=human_task, - ) - - # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same - # task spec, complete that form as well. 
- # if update_all: - # last_index = spiff_task.task_info()["mi_index"] - # next_task = processor.next_task() - # while next_task and next_task.task_info()["mi_index"] > last_index: - # __update_task(processor, next_task, form_data, user) - # last_index = next_task.task_info()["mi_index"] - # next_task = processor.next_task() - - next_human_task_assigned_to_me = ( - HumanTaskModel.query.filter_by( - process_instance_id=process_instance_id, completed=False - ) - .order_by(asc(HumanTaskModel.id)) # type: ignore - .join(HumanTaskUserModel) - .filter_by(user_id=principal.user_id) - .first() - ) - if next_human_task_assigned_to_me: - return make_response( - jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200 - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_create( - modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_create.""" - bpmn_task_identifier = _get_required_parameter_or_raise( - "bpmn_task_identifier", body - ) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] - if file is None: - raise ApiError( - error_code="cannot_find_file", - message=f"Could not find the primary bpmn file for process_model: {process_model.id}", - status_code=404, - ) - - # TODO: move this to an xml service or something - file_contents = SpecFileService.get_data(process_model, file.name) - bpmn_etree_element = etree.fromstring(file_contents) - - nsmap = bpmn_etree_element.nsmap - spiff_element_maker = ElementMaker( - namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap - ) - - script_task_elements = bpmn_etree_element.xpath( - f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(script_task_elements) == 0: - raise ApiError( - error_code="missing_script_task", - message=f"Cannot find a script task with id: {bpmn_task_identifier}", - status_code=404, - ) - script_task_element = script_task_elements[0] - - extension_elements = None - extension_elements_array = script_task_element.xpath( - "//bpmn:extensionElements", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(extension_elements_array) == 0: - bpmn_element_maker = ElementMaker( - namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap - ) - extension_elements = bpmn_element_maker("extensionElements") - script_task_element.append(extension_elements) - else: - extension_elements = extension_elements_array[0] - - unit_test_elements = None - unit_test_elements_array = extension_elements.xpath( - "//spiffworkflow:unitTests", - namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, - ) - if len(unit_test_elements_array) == 0: - unit_test_elements = spiff_element_maker("unitTests") - extension_elements.append(unit_test_elements) - else: - unit_test_elements = unit_test_elements_array[0] - - fuzz = "".join( - random.choice(string.ascii_uppercase + string.digits) # noqa: S311 - for _ in range(7) - ) - unit_test_id = f"unit_test_{fuzz}" - - input_json_element = spiff_element_maker("inputJson", 
json.dumps(input_json)) - expected_output_json_element = spiff_element_maker( - "expectedOutputJson", json.dumps(expected_output_json) - ) - unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) - unit_test_element.append(input_json_element) - unit_test_element.append(expected_output_json_element) - unit_test_elements.append(unit_test_element) - SpecFileService.update_file( - process_model, file.name, etree.tostring(bpmn_etree_element) - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_run( - modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_run.""" - # FIXME: We should probably clear this somewhere else but this works - current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None - current_app.config["THREAD_LOCAL_DATA"].spiff_step = None - - python_script = _get_required_parameter_or_raise("python_script", body) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( - python_script, input_json, expected_output_json - ) - return make_response(jsonify(result), 200) - - -def get_file_from_request() -> Any: - """Get_file_from_request.""" - request_file = connexion.request.files.get("file") - if not request_file: - raise ApiError( - error_code="no_file_given", - message="Given request does not contain a file", - status_code=400, - ) - return request_file - - -# process_model_id uses forward slashes on all OSes -# this seems to return an object where process_model.id has backslashes on windows -def get_process_model(process_model_id: str) -> ProcessModelInfo: - """Get_process_model.""" - process_model = None - try: - process_model = ProcessModelService.get_process_model(process_model_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_cannot_be_found", - message=f"Process model cannot be found: {process_model_id}", - status_code=400, - ) - ) from exception - - return process_model - - -def find_principal_or_raise() -> PrincipalModel: - """Find_principal_or_raise.""" - principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() - if principal is None: - raise ( - ApiError( - error_code="principal_not_found", - message=f"Principal not found from user id: {g.user.id}", - status_code=400, - ) - ) - return principal # type: ignore - - -def find_process_instance_by_id_or_raise( - process_instance_id: int, -) -> ProcessInstanceModel: - """Find_process_instance_by_id_or_raise.""" - process_instance_query = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ) - - # we had a frustrating session trying to do joins and access columns from two tables. 
here's some notes for our future selves: - # this returns an object that allows you to do: process_instance.UserModel.username - # process_instance = db.session.query(ProcessInstanceModel, UserModel).filter_by(id=process_instance_id).first() - # you can also use splat with add_columns, but it still didn't ultimately give us access to the process instance - # attributes or username like we wanted: - # process_instance_query.join(UserModel).add_columns(*ProcessInstanceModel.__table__.columns, UserModel.username) - - process_instance = process_instance_query.first() - if process_instance is None: - raise ( - ApiError( - error_code="process_instance_cannot_be_found", - message=f"Process instance cannot be found: {process_instance_id}", - status_code=400, - ) - ) - return process_instance # type: ignore - - -def get_value_from_array_with_index(array: list, index: int) -> Any: - """Get_value_from_array_with_index.""" - if index < 0: - return None - - if index >= len(array): - return None - - return array[index] - - -def prepare_form_data( - form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo -) -> str: - """Prepare_form_data.""" - if task_data is None: - return "" - - file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") - return render_jinja_template(file_contents, task_data) - - -def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: - """Render_jinja_template.""" - jinja_environment = jinja2.Environment( - autoescape=True, lstrip_blocks=True, trim_blocks=True - ) - template = jinja_environment.from_string(unprocessed_template) - return template.render(**data) - - -def get_spiff_task_from_process_instance( - task_id: str, - process_instance: ProcessInstanceModel, - processor: Union[ProcessInstanceProcessor, None] = None, -) -> SpiffTask: - """Get_spiff_task_from_process_instance.""" - if processor is None: - processor = ProcessInstanceProcessor(process_instance) - task_uuid = uuid.UUID(task_id) - spiff_task = processor.bpmn_process_instance.get_task(task_uuid) - - if spiff_task is None: - raise ( - ApiError( - error_code="empty_task", - message="Processor failed to obtain task.", - status_code=500, - ) - ) - return spiff_task # sample body: @@ -1873,64 +120,7 @@ def github_webhook_receive(body: Dict) -> Response: ) -# -# Methods for secrets CRUD - maybe move somewhere else: -# - - -def get_secret(key: str) -> Optional[str]: - """Get_secret.""" - return SecretService.get_secret(key) - - -def secret_list( - page: int = 1, - per_page: int = 100, -) -> Response: - """Secret_list.""" - secrets = ( - SecretModel.query.order_by(SecretModel.key) - .join(UserModel) - .add_columns( - UserModel.username, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - response_json = { - "results": secrets.items, - "pagination": { - "count": len(secrets.items), - "total": secrets.total, - "pages": secrets.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def add_secret(body: Dict) -> Response: - """Add secret.""" - secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) - return Response( - json.dumps(SecretModelSchema().dump(secret_model)), - status=201, - mimetype="application/json", - ) - - -def update_secret(key: str, body: dict) -> Response: - """Update secret.""" - SecretService().update_secret(key, body["value"], g.user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def delete_secret(key: str) -> Response: - """Delete 
secret.""" - current_user = UserService.current_user() - SecretService.delete_secret(key, current_user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def update_task_data( +def task_data_update( process_instance_id: str, modified_process_model_identifier: str, task_id: str, @@ -1943,7 +133,8 @@ def update_task_data( if process_instance: if process_instance.status != "suspended": raise ProcessInstanceTaskDataCannotBeUpdatedError( - f"The process instance needs to be suspended to udpate the task-data. It is currently: {process_instance.status}" + "The process instance needs to be suspended to udpate the task-data." + f" It is currently: {process_instance.status}" ) process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) @@ -1967,12 +158,18 @@ def update_task_data( else: raise ApiError( error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", + message=( + f"Could not find Task: {task_id} in Instance:" + f" {process_instance_id}." + ), ) else: raise ApiError( error_code="update_task_data_error", - message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", + message=( + f"Could not update task data for Instance: {process_instance_id}, and" + f" Task: {task_id}." + ), ) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), @@ -1999,62 +196,56 @@ def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) return return_value -# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches -def _update_form_schema_with_task_data_as_needed( - in_dict: dict, task_data: dict -) -> None: - """Update_nested.""" - for k, value in in_dict.items(): - if "anyOf" == k: - # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] - if isinstance(value, list): - if len(value) == 1: - first_element_in_value_list = value[0] - if isinstance(first_element_in_value_list, str): - if first_element_in_value_list.startswith( - "options_from_task_data_var:" - ): - task_data_var = first_element_in_value_list.replace( - "options_from_task_data_var:", "" - ) +def send_bpmn_event( + modified_process_model_identifier: str, + process_instance_id: str, + body: Dict, +) -> Response: + """Send a bpmn event to a workflow.""" + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) + ).first() + if process_instance: + processor = ProcessInstanceProcessor(process_instance) + processor.send_bpmn_event(body) + else: + raise ApiError( + error_code="send_bpmn_event_error", + message=f"Could not send event to Instance: {process_instance_id}", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) - if task_data_var not in task_data: - raise ( - ApiError( - error_code="missing_task_data_var", - message=f"Task data is missing variable: {task_data_var}", - status_code=500, - ) - ) - select_options_from_task_data = task_data.get(task_data_var) - if isinstance(select_options_from_task_data, list): - if all( - "value" in d and "label" in d - for d in select_options_from_task_data - ): - - def map_function( - task_data_select_option: TaskDataSelectOption, - ) -> ReactJsonSchemaSelectOption: - """Map_function.""" - return { - "type": "string", - "enum": [task_data_select_option["value"]], - "title": 
task_data_select_option["label"], - } - - options_for_react_json_schema_form = list( - map(map_function, select_options_from_task_data) - ) - - in_dict[k] = options_for_react_json_schema_form - elif isinstance(value, dict): - _update_form_schema_with_task_data_as_needed(value, task_data) - elif isinstance(value, list): - for o in value: - if isinstance(o, dict): - _update_form_schema_with_task_data_as_needed(o, task_data) +def manual_complete_task( + modified_process_model_identifier: str, + process_instance_id: str, + task_id: str, + body: Dict, +) -> Response: + """Mark a task complete without executing it.""" + execute = body.get("execute", True) + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) + ).first() + if process_instance: + processor = ProcessInstanceProcessor(process_instance) + processor.manual_complete_task(task_id, execute) + else: + raise ApiError( + error_code="complete_task", + message=( + f"Could not complete Task {task_id} in Instance {process_instance_id}" + ), + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) def _commit_and_push_to_git(message: str) -> None: @@ -2066,36 +257,66 @@ def _commit_and_push_to_git(message: str) -> None: current_app.logger.info("Git commit on save is disabled") -def _find_process_instance_for_me_or_raise( +def _un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str: + """Un_modify_modified_process_model_id.""" + return modified_process_model_identifier.replace(":", "/") + + +def _find_process_instance_by_id_or_raise( process_instance_id: int, ) -> ProcessInstanceModel: - """_find_process_instance_for_me_or_raise.""" - process_instance: ProcessInstanceModel = ( - ProcessInstanceModel.query.filter_by(id=process_instance_id) - .outerjoin(HumanTaskModel) - .outerjoin( - HumanTaskUserModel, - and_( - HumanTaskModel.id == HumanTaskUserModel.human_task_id, - HumanTaskUserModel.user_id == g.user.id, - ), - ) - .filter( - or_( - HumanTaskUserModel.id.is_not(None), - ProcessInstanceModel.process_initiator_id == g.user.id, - ) - ) - .first() + """Find_process_instance_by_id_or_raise.""" + process_instance_query = ProcessInstanceModel.query.filter_by( + id=process_instance_id ) + # we had a frustrating session trying to do joins and access columns from two tables. 
here's some notes for our future selves: + # this returns an object that allows you to do: process_instance.UserModel.username + # process_instance = db.session.query(ProcessInstanceModel, UserModel).filter_by(id=process_instance_id).first() + # you can also use splat with add_columns, but it still didn't ultimately give us access to the process instance + # attributes or username like we wanted: + # process_instance_query.join(UserModel).add_columns(*ProcessInstanceModel.__table__.columns, UserModel.username) + + process_instance = process_instance_query.first() if process_instance is None: raise ( ApiError( error_code="process_instance_cannot_be_found", - message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.", + message=f"Process instance cannot be found: {process_instance_id}", status_code=400, ) ) + return process_instance # type: ignore - return process_instance + +# process_model_id uses forward slashes on all OSes +# this seems to return an object where process_model.id has backslashes on windows +def _get_process_model(process_model_id: str) -> ProcessModelInfo: + """Get_process_model.""" + process_model = None + try: + process_model = ProcessModelService.get_process_model(process_model_id) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_cannot_be_found", + message=f"Process model cannot be found: {process_model_id}", + status_code=400, + ) + ) from exception + + return process_model + + +def _find_principal_or_raise() -> PrincipalModel: + """Find_principal_or_raise.""" + principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() + if principal is None: + raise ( + ApiError( + error_code="principal_not_found", + message=f"Principal not found from user id: {g.user.id}", + status_code=400, + ) + ) + return principal # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_groups_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_groups_controller.py new file mode 100644 index 000000000..228be1815 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_groups_controller.py @@ -0,0 +1,130 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +from typing import Any +from typing import Optional + +import flask.wrappers +from flask import g +from flask import jsonify +from flask import make_response +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError + +from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( + ProcessEntityNotFoundError, +) +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_group import ProcessGroupSchema +from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git +from spiffworkflow_backend.routes.process_api_blueprint import ( + _un_modify_modified_process_model_id, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService + + +def process_group_create(body: dict) -> flask.wrappers.Response: + """Add_process_group.""" + process_group = ProcessGroup(**body) + ProcessModelService.add_process_group(process_group) + _commit_and_push_to_git( + f"User: {g.user.username} added process group {process_group.id}" + ) + return make_response(jsonify(process_group), 201) + + +def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response: + 
"""Process_group_delete.""" + process_group_id = _un_modify_modified_process_model_id(modified_process_group_id) + ProcessModelService().process_group_delete(process_group_id) + _commit_and_push_to_git( + f"User: {g.user.username} deleted process group {process_group_id}" + ) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_group_update( + modified_process_group_id: str, body: dict +) -> flask.wrappers.Response: + """Process Group Update.""" + body_include_list = ["display_name", "description"] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + process_group_id = _un_modify_modified_process_model_id(modified_process_group_id) + process_group = ProcessGroup(id=process_group_id, **body_filtered) + ProcessModelService.update_process_group(process_group) + _commit_and_push_to_git( + f"User: {g.user.username} updated process group {process_group_id}" + ) + return make_response(jsonify(process_group), 200) + + +def process_group_list( + process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process_group_list.""" + if process_group_identifier is not None: + process_groups = ProcessModelService.get_process_groups( + process_group_identifier + ) + else: + process_groups = ProcessModelService.get_process_groups() + batch = ProcessModelService().get_batch( + items=process_groups, page=page, per_page=per_page + ) + pages = len(process_groups) // per_page + remainder = len(process_groups) % per_page + if remainder > 0: + pages += 1 + + response_json = { + "results": ProcessGroupSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_groups), + "pages": pages, + }, + } + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def process_group_show( + modified_process_group_id: str, +) -> Any: + """Process_group_show.""" + process_group_id = _un_modify_modified_process_model_id(modified_process_group_id) + try: + process_group = ProcessModelService.get_process_group(process_group_id) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_group_cannot_be_found", + message=f"Process group cannot be found: {process_group_id}", + status_code=400, + ) + ) from exception + + process_group.parent_groups = ProcessModelService.get_parent_group_array( + process_group.id + ) + return make_response(jsonify(process_group), 200) + + +def process_group_move( + modified_process_group_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_group_move.""" + original_process_group_id = _un_modify_modified_process_model_id( + modified_process_group_identifier + ) + new_process_group = ProcessModelService().process_group_move( + original_process_group_id, new_location + ) + _commit_and_push_to_git( + f"User: {g.user.username} moved process group {original_process_group_id} to" + f" {new_process_group.id}" + ) + return make_response(jsonify(new_process_group), 200) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py new file mode 100644 index 000000000..3f7da50c9 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -0,0 +1,693 @@ +"""APIs for dealing with process groups, process models, and process 
instances.""" +import json +from typing import Any +from typing import Dict +from typing import Optional + +import flask.wrappers +from flask import current_app +from flask import g +from flask import jsonify +from flask import make_response +from flask import request +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from SpiffWorkflow.task import TaskState # type: ignore +from sqlalchemy import and_ +from sqlalchemy import or_ + +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceCannotBeDeletedError, +) +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) +from spiffworkflow_backend.models.spec_reference import SpecReferenceCache +from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError +from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel +from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.routes.process_api_blueprint import ( + _find_process_instance_by_id_or_raise, +) +from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model +from spiffworkflow_backend.routes.process_api_blueprint import ( + _un_modify_modified_process_model_id, +) +from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService +from spiffworkflow_backend.services.git_service import GitCommandError +from spiffworkflow_backend.services.git_service import GitService +from spiffworkflow_backend.services.message_service import MessageService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportFilter, +) +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportService, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.spec_file_service import SpecFileService + + +def process_instance_create( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Create_process_instance.""" + process_model_identifier = _un_modify_modified_process_model_id( + modified_process_model_identifier + ) + process_instance = ( + ProcessInstanceService.create_process_instance_from_process_model_identifier( + process_model_identifier, g.user + ) + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=201, + mimetype="application/json", + ) + + +def process_instance_run( + modified_process_model_identifier: str, + process_instance_id: int, + do_engine_steps: bool = True, +) -> flask.wrappers.Response: + """Process_instance_run.""" + process_instance = 
ProcessInstanceService().get_process_instance( + process_instance_id + ) + if process_instance.status != "not_started": + raise ApiError( + error_code="process_instance_not_runnable", + message=( + f"Process Instance ({process_instance.id}) is currently running or has" + " already run." + ), + status_code=400, + ) + + processor = ProcessInstanceProcessor(process_instance) + + if do_engine_steps: + try: + processor.do_engine_steps(save=True) + except ApiError as e: + ErrorHandlingService().handle_error(processor, e) + raise e + except Exception as e: + ErrorHandlingService().handle_error(processor, e) + task = processor.bpmn_process_instance.last_task + raise ApiError.from_task( + error_code="unknown_exception", + message=f"An unknown error occurred. Original error: {e}", + status_code=400, + task=task, + ) from e + + if not current_app.config["RUN_BACKGROUND_SCHEDULER"]: + MessageService.process_message_instances() + + process_instance_api = ProcessInstanceService.processor_to_process_instance_api( + processor + ) + process_instance_data = processor.get_data() + process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) + process_instance_metadata["data"] = process_instance_data + return Response( + json.dumps(process_instance_metadata), status=200, mimetype="application/json" + ) + + +def process_instance_terminate( + process_instance_id: int, + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_instance_terminate.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.terminate() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_suspend( + process_instance_id: int, + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_instance_suspend.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.suspend() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_resume( + process_instance_id: int, + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_instance_resume.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_log_list( + modified_process_model_identifier: str, + process_instance_id: int, + page: int = 1, + per_page: int = 100, + detailed: bool = False, +) -> flask.wrappers.Response: + """Process_instance_log_list.""" + # to make sure the process instance exists + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + + log_query = SpiffLoggingModel.query.filter( + SpiffLoggingModel.process_instance_id == process_instance.id + ) + if not detailed: + log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore + + logs = ( + log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore + .join( + UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True + ) # isouter since if we don't have a user, we still want the log + .add_columns( + UserModel.username, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) 
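# For illustration only -- not part of this changeset: the list endpoints in this file all wrap a Flask-SQLAlchemy pagination object (like `logs` above) in the same response envelope. A minimal sketch of that shape; `build_paginated_response` is a hypothetical helper, not something this diff adds:
def build_paginated_response(paginated):
    return {
        "results": paginated.items,  # the rows on the requested page
        "pagination": {
            "count": len(paginated.items),  # items on this page
            "total": paginated.total,  # items across all pages
            "pages": paginated.pages,  # page count at the given per_page
        },
    }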
+ + response_json = { + "results": logs.items, + "pagination": { + "count": len(logs.items), + "total": logs.total, + "pages": logs.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def process_instance_list_for_me( + process_model_identifier: Optional[str] = None, + page: int = 1, + per_page: int = 100, + start_from: Optional[int] = None, + start_to: Optional[int] = None, + end_from: Optional[int] = None, + end_to: Optional[int] = None, + process_status: Optional[str] = None, + user_filter: Optional[bool] = False, + report_identifier: Optional[str] = None, + report_id: Optional[int] = None, + user_group_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_list_for_me.""" + return process_instance_list( + process_model_identifier=process_model_identifier, + page=page, + per_page=per_page, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, + user_filter=user_filter, + report_identifier=report_identifier, + report_id=report_id, + user_group_identifier=user_group_identifier, + with_relation_to_me=True, + ) + + +def process_instance_list( + process_model_identifier: Optional[str] = None, + page: int = 1, + per_page: int = 100, + start_from: Optional[int] = None, + start_to: Optional[int] = None, + end_from: Optional[int] = None, + end_to: Optional[int] = None, + process_status: Optional[str] = None, + with_relation_to_me: Optional[bool] = None, + user_filter: Optional[bool] = False, + report_identifier: Optional[str] = None, + report_id: Optional[int] = None, + user_group_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_list.""" + process_instance_report = ProcessInstanceReportService.report_with_identifier( + g.user, report_id, report_identifier + ) + + if user_filter: + report_filter = ProcessInstanceReportFilter( + process_model_identifier=process_model_identifier, + user_group_identifier=user_group_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + with_relation_to_me=with_relation_to_me, + process_status=process_status.split(",") if process_status else None, + ) + else: + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model_identifier, + user_group_identifier=user_group_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, + with_relation_to_me=with_relation_to_me, + ) + ) + + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + page=page, + per_page=per_page, + user=g.user, + ) + + return make_response(jsonify(response_json), 200) + + +def process_instance_report_column_list() -> flask.wrappers.Response: + """Process_instance_report_column_list.""" + table_columns = ProcessInstanceReportService.builtin_column_options() + columns_for_metadata = ( + db.session.query(ProcessInstanceMetadataModel.key) + .order_by(ProcessInstanceMetadataModel.key) + .distinct() # type: ignore + .all() + ) + columns_for_metadata_strings = [ + {"Header": i[0], "accessor": i[0], "filterable": True} + for i in columns_for_metadata + ] + return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) + + +def process_instance_show_for_me( + modified_process_model_identifier: str, + 
process_instance_id: int, + process_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_show_for_me.""" + process_instance = _find_process_instance_for_me_or_raise(process_instance_id) + return _get_process_instance( + process_instance=process_instance, + modified_process_model_identifier=modified_process_model_identifier, + process_identifier=process_identifier, + ) + + +def process_instance_show( + modified_process_model_identifier: str, + process_instance_id: int, + process_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_show.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + return _get_process_instance( + process_instance=process_instance, + modified_process_model_identifier=modified_process_model_identifier, + process_identifier=process_identifier, + ) + + +def process_instance_delete( + process_instance_id: int, modified_process_model_identifier: str +) -> flask.wrappers.Response: + """Process_instance_delete.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + + if not process_instance.has_terminal_status(): + raise ProcessInstanceCannotBeDeletedError( + f"Process instance ({process_instance.id}) cannot be deleted since it does" + f" not have a terminal status. Current status is {process_instance.status}." + ) + + # (Pdb) db.session.delete + # > + db.session.query(SpiffLoggingModel).filter_by( + process_instance_id=process_instance.id + ).delete() + db.session.query(SpiffStepDetailsModel).filter_by( + process_instance_id=process_instance.id + ).delete() + db.session.delete(process_instance) + db.session.commit() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_list( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process_instance_report_list.""" + process_instance_reports = ProcessInstanceReportModel.query.filter_by( + created_by_id=g.user.id, + ).all() + + return make_response(jsonify(process_instance_reports), 200) + + +def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: + """Process_instance_report_create.""" + process_instance_report = ProcessInstanceReportModel.create_report( + identifier=body["identifier"], + user=g.user, + report_metadata=body["report_metadata"], + ) + + return make_response(jsonify(process_instance_report), 201) + + +def process_instance_report_update( + report_id: int, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Process_instance_report_update.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + id=report_id, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + process_instance_report.report_metadata = body["report_metadata"] + db.session.commit() + + return make_response(jsonify(process_instance_report), 201) + + +def process_instance_report_delete( + report_id: int, +) -> flask.wrappers.Response: + """Process_instance_report_delete.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + id=report_id, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + db.session.delete(process_instance_report) + 
db.session.commit() + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_show( + report_id: int, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process_instance_report_show.""" + process_instances = ProcessInstanceModel.query.order_by( + ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + ).paginate(page=page, per_page=per_page, error_out=False) + + process_instance_report = ProcessInstanceReportModel.query.filter_by( + id=report_id, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + substitution_variables = request.args.to_dict() + result_dict = process_instance_report.generate_report( + process_instances.items, substitution_variables + ) + + # update this if we go back to a database query instead of filtering in memory + result_dict["pagination"] = { + "count": len(result_dict["results"]), + "total": len(result_dict["results"]), + "pages": 1, + } + + return Response(json.dumps(result_dict), status=200, mimetype="application/json") + + +def process_instance_task_list_without_task_data_for_me( + modified_process_model_identifier: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list_without_task_data_for_me.""" + process_instance = _find_process_instance_for_me_or_raise(process_instance_id) + return process_instance_task_list( + modified_process_model_identifier, + process_instance, + all_tasks, + spiff_step, + get_task_data=False, + ) + + +def process_instance_task_list_without_task_data( + modified_process_model_identifier: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list_without_task_data.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + return process_instance_task_list( + modified_process_model_identifier, + process_instance, + all_tasks, + spiff_step, + get_task_data=False, + ) + + +def process_instance_task_list_with_task_data( + modified_process_model_identifier: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list_with_task_data.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + return process_instance_task_list( + modified_process_model_identifier, + process_instance, + all_tasks, + spiff_step, + get_task_data=True, + ) + + +def process_instance_task_list( + _modified_process_model_identifier: str, + process_instance: ProcessInstanceModel, + all_tasks: bool = False, + spiff_step: int = 0, + get_task_data: bool = False, +) -> flask.wrappers.Response: + """Process_instance_task_list.""" + if spiff_step > 0: + step_detail = ( + db.session.query(SpiffStepDetailsModel) + .filter( + SpiffStepDetailsModel.process_instance_id == process_instance.id, + SpiffStepDetailsModel.spiff_step == spiff_step, + ) + .first() + ) + if step_detail is not None and process_instance.bpmn_json is not None: + bpmn_json = json.loads(process_instance.bpmn_json) + bpmn_json["tasks"] = step_detail.task_json["tasks"] + bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"] + process_instance.bpmn_json = json.dumps(bpmn_json) + + processor = 
ProcessInstanceProcessor(process_instance) + + spiff_tasks = None + if all_tasks: + spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + else: + spiff_tasks = processor.get_all_user_tasks() + + tasks = [] + for spiff_task in spiff_tasks: + task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task) + if get_task_data: + task.data = spiff_task.data + tasks.append(task) + + return make_response(jsonify(tasks), 200) + + +def process_instance_reset( + process_instance_id: int, + modified_process_model_identifier: str, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_reset.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + step_detail = ( + db.session.query(SpiffStepDetailsModel) + .filter( + SpiffStepDetailsModel.process_instance_id == process_instance.id, + SpiffStepDetailsModel.spiff_step == spiff_step, + ) + .first() + ) + if step_detail is not None and process_instance.bpmn_json is not None: + bpmn_json = json.loads(process_instance.bpmn_json) + bpmn_json["tasks"] = step_detail.task_json["tasks"] + bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"] + process_instance.bpmn_json = json.dumps(bpmn_json) + + db.session.add(process_instance) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="reset_process_instance_error", + message=f"Could not update the Instance. Original error is {e}", + ) from e + + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + +def _get_process_instance( + modified_process_model_identifier: str, + process_instance: ProcessInstanceModel, + process_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """_get_process_instance.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + try: + current_version_control_revision = GitService.get_current_revision() + except GitCommandError: + current_version_control_revision = "" + + process_model_with_diagram = None + name_of_file_with_diagram = None + if process_identifier: + spec_reference = SpecReferenceCache.query.filter_by( + identifier=process_identifier, type="process" + ).first() + if spec_reference is None: + raise SpecReferenceNotFoundError( + "Could not find given process identifier in the cache:" + f" {process_identifier}" + ) + + process_model_with_diagram = ProcessModelService.get_process_model( + spec_reference.process_model_id + ) + name_of_file_with_diagram = spec_reference.file_name + else: + process_model_with_diagram = _get_process_model(process_model_identifier) + if process_model_with_diagram.primary_file_name: + name_of_file_with_diagram = process_model_with_diagram.primary_file_name + + if process_model_with_diagram and name_of_file_with_diagram: + if ( + process_instance.bpmn_version_control_identifier + == current_version_control_revision + ): + bpmn_xml_file_contents = SpecFileService.get_data( + process_model_with_diagram, name_of_file_with_diagram + ).decode("utf-8") + else: + bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( + process_model_with_diagram, + process_instance.bpmn_version_control_identifier, + file_name=name_of_file_with_diagram, + ) + process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents + + return make_response(jsonify(process_instance), 200) + + +def _find_process_instance_for_me_or_raise( + process_instance_id: int, +) -> 
ProcessInstanceModel: + """_find_process_instance_for_me_or_raise.""" + process_instance: ProcessInstanceModel = ( + ProcessInstanceModel.query.filter_by(id=process_instance_id) + .outerjoin(HumanTaskModel) + .outerjoin( + HumanTaskUserModel, + and_( + HumanTaskModel.id == HumanTaskUserModel.human_task_id, + HumanTaskUserModel.user_id == g.user.id, + ), + ) + .filter( + or_( + HumanTaskUserModel.id.is_not(None), + ProcessInstanceModel.process_initiator_id == g.user.id, + ) + ) + .first() + ) + + if process_instance is None: + raise ( + ApiError( + error_code="process_instance_cannot_be_found", + message=( + f"Process instance with id {process_instance_id} cannot be found" + " that is associated with you." + ), + status_code=400, + ) + ) + + return process_instance diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py new file mode 100644 index 000000000..1709357a7 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py @@ -0,0 +1,496 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +import os +import re +from typing import Any +from typing import Dict +from typing import Optional +from typing import Union + +import connexion # type: ignore +import flask.wrappers +from flask import current_app +from flask import g +from flask import jsonify +from flask import make_response +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError + +from spiffworkflow_backend.interfaces import IdToProcessGroupMapping +from spiffworkflow_backend.models.file import FileSchema +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git +from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model +from spiffworkflow_backend.routes.process_api_blueprint import ( + _un_modify_modified_process_model_id, +) +from spiffworkflow_backend.services.git_service import GitService +from spiffworkflow_backend.services.git_service import MissingGitConfigsError +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.spec_file_service import SpecFileService + + +def process_model_create( + modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Process_model_create.""" + body_include_list = [ + "id", + "display_name", + "primary_file_name", + "primary_process_id", + "description", + "metadata_extraction_paths", + ] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + _get_process_group_from_modified_identifier(modified_process_group_id) + + process_model_info = ProcessModelInfo(**body_filtered) # type: ignore + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + + 
+def process_model_create(
+    modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
+) -> flask.wrappers.Response:
+    """Process_model_create."""
+    body_include_list = [
+        "id",
+        "display_name",
+        "primary_file_name",
+        "primary_process_id",
+        "description",
+        "metadata_extraction_paths",
+    ]
+    body_filtered = {
+        include_item: body[include_item]
+        for include_item in body_include_list
+        if include_item in body
+    }
+
+    _get_process_group_from_modified_identifier(modified_process_group_id)
+
+    process_model_info = ProcessModelInfo(**body_filtered)  # type: ignore
+    if process_model_info is None:
+        raise ApiError(
+            error_code="process_model_could_not_be_created",
+            message=f"Process Model could not be created from given body: {body}",
+            status_code=400,
+        )
+
+    ProcessModelService.add_process_model(process_model_info)
+    _commit_and_push_to_git(
+        f"User: {g.user.username} created process model {process_model_info.id}"
+    )
+    return Response(
+        json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
+        status=201,
+        mimetype="application/json",
+    )
+
+
+def process_model_delete(
+    modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+    """Process_model_delete."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    ProcessModelService().process_model_delete(process_model_identifier)
+    _commit_and_push_to_git(
+        f"User: {g.user.username} deleted process model {process_model_identifier}"
+    )
+    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_model_update(
+    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
+) -> Any:
+    """Process_model_update."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    body_include_list = [
+        "display_name",
+        "primary_file_name",
+        "primary_process_id",
+        "description",
+        "metadata_extraction_paths",
+    ]
+    body_filtered = {
+        include_item: body[include_item]
+        for include_item in body_include_list
+        if include_item in body
+    }
+
+    process_model = _get_process_model(process_model_identifier)
+    ProcessModelService.update_process_model(process_model, body_filtered)
+    _commit_and_push_to_git(
+        f"User: {g.user.username} updated process model {process_model_identifier}"
+    )
+    return ProcessModelInfoSchema().dump(process_model)
+
+
+def process_model_show(modified_process_model_identifier: str) -> Any:
+    """Process_model_show."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+    files = sorted(
+        SpecFileService.get_files(process_model),
+        key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
+    )
+    process_model.files = files
+    for file in process_model.files:
+        file.references = SpecFileService.get_references_for_file(file, process_model)
+
+    process_model.parent_groups = ProcessModelService.get_parent_group_array(
+        process_model.id
+    )
+    return make_response(jsonify(process_model), 200)
+
+
+def process_model_move(
+    modified_process_model_identifier: str, new_location: str
+) -> flask.wrappers.Response:
+    """Process_model_move."""
+    original_process_model_id = _un_modify_modified_process_model_id(
+        modified_process_model_identifier
+    )
+    new_process_model = ProcessModelService().process_model_move(
+        original_process_model_id, new_location
+    )
+    _commit_and_push_to_git(
+        f"User: {g.user.username} moved process model {original_process_model_id} to"
+        f" {new_process_model.id}"
+    )
+    return make_response(jsonify(new_process_model), 200)
+
+
+def process_model_publish(
+    modified_process_model_identifier: str, branch_to_update: Optional[str] = None
+) -> flask.wrappers.Response:
+    """Process_model_publish."""
+    if branch_to_update is None:
+        branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
+    if branch_to_update is None:
+        raise MissingGitConfigsError(
+            "Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
+            "This is required for publishing process models"
+        )
+    process_model_identifier = _un_modify_modified_process_model_id(
+        modified_process_model_identifier
+    )
+    pr_url = GitService().publish(process_model_identifier, branch_to_update)
+    data = {"ok": True, "pr_url": pr_url}
+    return Response(json.dumps(data), status=200, mimetype="application/json")
+
+
" + "This is required for publishing process models" + ) + process_model_identifier = _un_modify_modified_process_model_id( + modified_process_model_identifier + ) + pr_url = GitService().publish(process_model_identifier, branch_to_update) + data = {"ok": True, "pr_url": pr_url} + return Response(json.dumps(data), status=200, mimetype="application/json") + + +def process_model_list( + process_group_identifier: Optional[str] = None, + recursive: Optional[bool] = False, + filter_runnable_by_user: Optional[bool] = False, + include_parent_groups: Optional[bool] = False, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process model list!""" + process_models = ProcessModelService.get_process_models( + process_group_id=process_group_identifier, + recursive=recursive, + filter_runnable_by_user=filter_runnable_by_user, + ) + process_models_to_return = ProcessModelService().get_batch( + process_models, page=page, per_page=per_page + ) + + if include_parent_groups: + process_group_cache = IdToProcessGroupMapping({}) + for process_model in process_models_to_return: + parent_group_lites_with_cache = ( + ProcessModelService.get_parent_group_array_and_cache_it( + process_model.id, process_group_cache + ) + ) + process_model.parent_groups = parent_group_lites_with_cache[ + "process_groups" + ] + + pages = len(process_models) // per_page + remainder = len(process_models) % per_page + if remainder > 0: + pages += 1 + response_json = { + "results": process_models_to_return, + "pagination": { + "count": len(process_models_to_return), + "total": len(process_models), + "pages": pages, + }, + } + return make_response(jsonify(response_json), 200) + + +def process_model_file_update( + modified_process_model_identifier: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_update.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = _get_process_model(process_model_identifier) + + request_file = _get_file_from_request() + request_file_contents = request_file.stream.read() + if not request_file_contents: + raise ApiError( + error_code="file_contents_empty", + message="Given request file does not have any content", + status_code=400, + ) + + SpecFileService.update_file(process_model, file_name, request_file_contents) + _commit_and_push_to_git( + f"User: {g.user.username} clicked save for" + f" {process_model_identifier}/{file_name}" + ) + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_file_delete( + modified_process_model_identifier: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_delete.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = _get_process_model(process_model_identifier) + try: + SpecFileService.delete_file(process_model, file_name) + except FileNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_file_cannot_be_found", + message=f"Process model file cannot be found: {file_name}", + status_code=400, + ) + ) from exception + + _commit_and_push_to_git( + f"User: {g.user.username} deleted process model file" + f" {process_model_identifier}/{file_name}" + ) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_file_create( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_model_file_create.""" + process_model_identifier = 
+def process_model_file_update(
+    modified_process_model_identifier: str, file_name: str
+) -> flask.wrappers.Response:
+    """Process_model_file_update."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+
+    request_file = _get_file_from_request()
+    request_file_contents = request_file.stream.read()
+    if not request_file_contents:
+        raise ApiError(
+            error_code="file_contents_empty",
+            message="Given request file does not have any content",
+            status_code=400,
+        )
+
+    SpecFileService.update_file(process_model, file_name, request_file_contents)
+    _commit_and_push_to_git(
+        f"User: {g.user.username} clicked save for"
+        f" {process_model_identifier}/{file_name}"
+    )
+
+    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_model_file_delete(
+    modified_process_model_identifier: str, file_name: str
+) -> flask.wrappers.Response:
+    """Process_model_file_delete."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+    try:
+        SpecFileService.delete_file(process_model, file_name)
+    except FileNotFoundError as exception:
+        raise (
+            ApiError(
+                error_code="process_model_file_cannot_be_found",
+                message=f"Process model file cannot be found: {file_name}",
+                status_code=400,
+            )
+        ) from exception
+
+    _commit_and_push_to_git(
+        f"User: {g.user.username} deleted process model file"
+        f" {process_model_identifier}/{file_name}"
+    )
+    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_model_file_create(
+    modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+    """Process_model_file_create."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+    request_file = _get_file_from_request()
+    if not request_file.filename:
+        raise ApiError(
+            error_code="could_not_get_filename",
+            message="Could not get filename from request",
+            status_code=400,
+        )
+
+    file = SpecFileService.add_file(
+        process_model, request_file.filename, request_file.stream.read()
+    )
+    file_contents = SpecFileService.get_data(process_model, file.name)
+    file.file_contents = file_contents
+    file.process_model_id = process_model.id
+    _commit_and_push_to_git(
+        f"User: {g.user.username} added process model file"
+        f" {process_model_identifier}/{file.name}"
+    )
+    return Response(
+        json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
+    )
+
+
+def process_model_file_show(
+    modified_process_model_identifier: str, file_name: str
+) -> Any:
+    """Process_model_file_show."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+    files = SpecFileService.get_files(process_model, file_name)
+    if len(files) == 0:
+        raise ApiError(
+            error_code="unknown_file",
+            message=(
+                f"No information exists for file {file_name};"
+                f" it does not exist in workflow {process_model_identifier}."
+            ),
+            status_code=404,
+        )
+
+    file = files[0]
+    file_contents = SpecFileService.get_data(process_model, file.name)
+    file.file_contents = file_contents
+    file.process_model_id = process_model.id
+    return FileSchema().dump(file)
+
+
+# {
+#     "natural_language_text": "Create a bug tracker process model \
+#     with a bug-details form that collects summary, description, and priority"
+# }
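+# For the example sentence above, the regex and slugging rules below derive
+# (values shown for illustration): process model id "bug-tracker", form id
+# "bug-details", and columns ["summary", "description", "priority"], then
+# generate bug-tracker.bpmn, bug-details-schema.json, and
+# bug-details-uischema.json inside the target process group.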
+def process_model_create_with_natural_language(
+    modified_process_group_id: str, body: Dict[str, str]
+) -> flask.wrappers.Response:
+    """Process_model_create_with_natural_language."""
+    pattern = re.compile(
+        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that"
+        r" collects (?P<columns>.*)"
+    )
+    match = pattern.match(body["natural_language_text"])
+    if match is None:
+        raise ApiError(
+            error_code="natural_language_text_not_yet_supported",
+            message=(
+                "Natural language text is not yet supported. Please use the form:"
+                f" {pattern.pattern}"
+            ),
+            status_code=400,
+        )
+    process_model_display_name = match.group("pm_name")
+    process_model_identifier = re.sub(r"[ _]", "-", process_model_display_name)
+    process_model_identifier = re.sub(r"-{2,}", "-", process_model_identifier).lower()
+
+    form_name = match.group("form_name")
+    form_identifier = re.sub(r"[ _]", "-", form_name)
+    form_identifier = re.sub(r"-{2,}", "-", form_identifier).lower()
+
+    column_names = match.group("columns")
+    columns = re.sub(r"(, (and )?)", ",", column_names).split(",")
+
+    process_group = _get_process_group_from_modified_identifier(
+        modified_process_group_id
+    )
+    qualified_process_model_identifier = (
+        f"{process_group.id}/{process_model_identifier}"
+    )
+
+    metadata_extraction_paths = []
+    for column in columns:
+        metadata_extraction_paths.append({"key": column, "path": column})
+
+    process_model_attributes = {
+        "id": qualified_process_model_identifier,
+        "display_name": process_model_display_name,
+        "description": None,
+        "metadata_extraction_paths": metadata_extraction_paths,
+    }
+
+    process_model_info = ProcessModelInfo(**process_model_attributes)  # type: ignore
+    if process_model_info is None:
+        raise ApiError(
+            error_code="process_model_could_not_be_created",
+            message=f"Process Model could not be created from given body: {body}",
+            status_code=400,
+        )
+
+    bpmn_template_file = os.path.join(
+        current_app.root_path, "templates", "basic_with_user_task_template.bpmn"
+    )
+    if not os.path.exists(bpmn_template_file):
+        raise ApiError(
+            error_code="bpmn_template_file_does_not_exist",
+            message="Could not find the bpmn template file to create process model.",
+            status_code=500,
+        )
+
+    ProcessModelService.add_process_model(process_model_info)
+    bpmn_process_identifier = f"{process_model_identifier}_process"
+    bpmn_template_contents = ""
+    with open(bpmn_template_file, encoding="utf-8") as f:
+        bpmn_template_contents = f.read()
+
+    bpmn_template_contents = bpmn_template_contents.replace(
+        "natural_language_process_id_template", bpmn_process_identifier
+    )
+    bpmn_template_contents = bpmn_template_contents.replace(
+        "form-identifier-id-template", form_identifier
+    )
+
+    form_uischema_json: dict = {"ui:order": columns}
+
+    form_properties: dict = {}
+    for column in columns:
+        form_properties[column] = {
+            "type": "string",
+            "title": column,
+        }
+    form_schema_json = {
+        "title": form_identifier,
+        "description": "",
+        "properties": form_properties,
+        "required": [],
+    }
+
+    SpecFileService.add_file(
+        process_model_info,
+        f"{process_model_identifier}.bpmn",
+        str.encode(bpmn_template_contents),
+    )
+    SpecFileService.add_file(
+        process_model_info,
+        f"{form_identifier}-schema.json",
+        str.encode(json.dumps(form_schema_json)),
+    )
+    SpecFileService.add_file(
+        process_model_info,
+        f"{form_identifier}-uischema.json",
+        str.encode(json.dumps(form_uischema_json)),
+    )
+
+    _commit_and_push_to_git(
+        f"User: {g.user.username} created process model via natural language:"
+        f" {process_model_info.id}"
+    )
+
+    default_report_metadata = ProcessInstanceReportService.system_metadata_map(
+        "default"
+    )
+    for column in columns:
+        default_report_metadata["columns"].append(
+            {"Header": column, "accessor": column, "filterable": True}
+        )
+    ProcessInstanceReportModel.create_report(
+        identifier=process_model_identifier,
+        user=g.user,
+        report_metadata=default_report_metadata,
+    )
+
+    return Response(
+        json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
+        status=201,
+        mimetype="application/json",
+    )
+
+
+def _get_file_from_request() -> Any:
+    """Get_file_from_request."""
+    request_file = connexion.request.files.get("file")
+    if not request_file:
+        raise ApiError(
+            error_code="no_file_given",
+            message="Given request does not contain a file",
+            status_code=400,
+        )
+    return request_file
+
+
+def _get_process_group_from_modified_identifier(
+    modified_process_group_id: str,
+) -> ProcessGroup:
+    """_get_process_group_from_modified_identifier."""
+    if modified_process_group_id is None:
+        raise ApiError(
+            error_code="process_group_id_not_specified",
+            message=(
+                "Process Model could not be created when process_group_id path param is"
+                " unspecified"
+            ),
+            status_code=400,
+        )
+
+    unmodified_process_group_id = _un_modify_modified_process_model_id(
+        modified_process_group_id
+    )
+    process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
+    if process_group is None:
+        raise ApiError(
+            error_code="process_model_could_not_be_created",
+            message=(
+                "Process Model could not be created from given body because Process"
+                f" Group could not be found: {unmodified_process_group_id}"
+            ),
+            status_code=400,
+        )
+    return process_group
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py
new file mode 100644
index 000000000..e97b26ae6
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py
@@ -0,0 +1,134 @@
+"""APIs for dealing with script unit tests."""
+import json
+import random
+import string
+from typing import Dict
+from typing import Union
+
+import flask.wrappers
+from flask import current_app
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+from lxml import etree  # type: ignore
+from lxml.builder import ElementMaker  # type: ignore
+
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.routes.process_api_blueprint import (
+    _get_required_parameter_or_raise,
+)
+from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
+from spiffworkflow_backend.services.spec_file_service import SpecFileService
+
+
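+# The handler below persists a unit test into the process model's primary BPMN
+# file as SpiffWorkflow extension elements. Roughly the shape it builds
+# (element names taken from the code; surrounding BPMN elided):
+#   <bpmn:scriptTask id="{bpmn_task_identifier}">
+#     <bpmn:extensionElements>
+#       <spiffworkflow:unitTests>
+#         <spiffworkflow:unitTest id="unit_test_XXXXXXX">
+#           <spiffworkflow:inputJson>{...}</spiffworkflow:inputJson>
+#           <spiffworkflow:expectedOutputJson>{...}</spiffworkflow:expectedOutputJson>
+#         </spiffworkflow:unitTest>
+#       </spiffworkflow:unitTests>
+#     </bpmn:extensionElements>
+#   </bpmn:scriptTask>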
+def script_unit_test_create(
+    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
+) -> flask.wrappers.Response:
+    """Script_unit_test_create."""
+    bpmn_task_identifier = _get_required_parameter_or_raise(
+        "bpmn_task_identifier", body
+    )
+    input_json = _get_required_parameter_or_raise("input_json", body)
+    expected_output_json = _get_required_parameter_or_raise(
+        "expected_output_json", body
+    )
+
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+    file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
+    if file is None:
+        raise ApiError(
+            error_code="cannot_find_file",
+            message=(
+                "Could not find the primary bpmn file for process_model:"
+                f" {process_model.id}"
+            ),
+            status_code=404,
+        )
+
+    # TODO: move this to an xml service or something
+    file_contents = SpecFileService.get_data(process_model, file.name)
+    bpmn_etree_element = etree.fromstring(file_contents)
+
+    nsmap = bpmn_etree_element.nsmap
+    spiff_element_maker = ElementMaker(
+        namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
+    )
+
+    script_task_elements = bpmn_etree_element.xpath(
+        f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",
+        namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
+    )
+    if len(script_task_elements) == 0:
+        raise ApiError(
+            error_code="missing_script_task",
+            message=f"Cannot find a script task with id: {bpmn_task_identifier}",
+            status_code=404,
+        )
+    script_task_element = script_task_elements[0]
+
+    extension_elements = None
+    extension_elements_array = script_task_element.xpath(
+        ".//bpmn:extensionElements",
+        namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
+    )
+    if len(extension_elements_array) == 0:
+        bpmn_element_maker = ElementMaker(
+            namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
+        )
+        extension_elements = bpmn_element_maker("extensionElements")
+        script_task_element.append(extension_elements)
+    else:
+        extension_elements = extension_elements_array[0]
+
+    unit_test_elements = None
+    unit_test_elements_array = extension_elements.xpath(
+        "//spiffworkflow:unitTests",
+        namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"},
+    )
+    if len(unit_test_elements_array) == 0:
+        unit_test_elements = spiff_element_maker("unitTests")
+        extension_elements.append(unit_test_elements)
+    else:
+        unit_test_elements = unit_test_elements_array[0]
+
+    fuzz = "".join(
+        random.choice(string.ascii_uppercase + string.digits)  # noqa: S311
+        for _ in range(7)
+    )
+    unit_test_id = f"unit_test_{fuzz}"
+
+    input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
+    expected_output_json_element = spiff_element_maker(
+        "expectedOutputJson", json.dumps(expected_output_json)
+    )
+    unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
+    unit_test_element.append(input_json_element)
+    unit_test_element.append(expected_output_json_element)
+    unit_test_elements.append(unit_test_element)
+    SpecFileService.update_file(
+        process_model, file.name, etree.tostring(bpmn_etree_element)
+    )
+
+    return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
+
+
+def script_unit_test_run(
+    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
+) -> flask.wrappers.Response:
+    """Script_unit_test_run."""
+    # FIXME: We should probably clear this somewhere else but this works
+    current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
+    current_app.config["THREAD_LOCAL_DATA"].spiff_step = None
+
+    python_script = _get_required_parameter_or_raise("python_script", body)
+    input_json = _get_required_parameter_or_raise("input_json", body)
+    expected_output_json = _get_required_parameter_or_raise(
+        "expected_output_json", body
+    )
+
+    result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
+        python_script, input_json, expected_output_json
+    )
+    return make_response(jsonify(result), 200)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/secrets_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/secrets_controller.py
new file mode 100644
index 000000000..fdf4c7fae
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/secrets_controller.py
@@ -0,0 +1,67 @@
+"""APIs for dealing with secrets."""
+import json
+from typing import Dict
+from typing import Optional
+
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+
+from spiffworkflow_backend.models.secret_model import SecretModel
+from spiffworkflow_backend.models.secret_model import SecretModelSchema
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.secret_service import SecretService
+from spiffworkflow_backend.services.user_service import UserService
+
+
+def secret_show(key: str) -> Optional[str]:
+    """Secret_show."""
+    return SecretService.get_secret(key)
+
+
+def secret_list(
+    page: int = 1,
+    per_page: int = 100,
+) -> Response:
+    """Secret_list."""
+    secrets = (
+        SecretModel.query.order_by(SecretModel.key)
+        .join(UserModel)
+        .add_columns(
+            UserModel.username,
+        )
+        .paginate(page=page, per_page=per_page, error_out=False)
+    )
+    response_json = {
+        "results": secrets.items,
+        "pagination": {
+            "count": len(secrets.items),
+            "total": secrets.total,
+            "pages": secrets.pages,
+        },
+    }
+    return make_response(jsonify(response_json), 200)
+
+
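+# Example request body for secret_create (key names come from the handler
+# below; values are illustrative): {"key": "my_api_token", "value": "s3cret"}.
+# secret_update takes the same "value" shape, with the key in the URL path.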
+def secret_create(body: Dict) -> Response:
+    """Add secret."""
+    secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
+    return Response(
+        json.dumps(SecretModelSchema().dump(secret_model)),
+        status=201,
+        mimetype="application/json",
+    )
+
+
+def secret_update(key: str, body: dict) -> Response:
+    """Update secret."""
+    SecretService().update_secret(key, body["value"], g.user.id)
+    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def secret_delete(key: str) -> Response:
+    """Delete secret."""
+    current_user = UserService.current_user()
+    SecretService.delete_secret(key, current_user.id)
+    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py
new file mode 100644
index 000000000..a1708ce8d
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py
@@ -0,0 +1,49 @@
+"""APIs for dealing with service tasks."""
+import json
+
+import flask.wrappers
+import werkzeug
+from flask import current_app
+from flask import g
+from flask import redirect
+from flask import request
+from flask.wrappers import Response
+
+from spiffworkflow_backend.routes.user import verify_token
+from spiffworkflow_backend.services.secret_service import SecretService
+from spiffworkflow_backend.services.service_task_service import ServiceTaskService
+
+
+def service_task_list() -> flask.wrappers.Response:
+    """Service_task_list."""
+    available_connectors = ServiceTaskService.available_connectors()
+    return Response(
+        json.dumps(available_connectors), status=200, mimetype="application/json"
+    )
+
+
+def authentication_list() -> flask.wrappers.Response:
+    """Authentication_list."""
+    available_authentications = ServiceTaskService.authentication_list()
+    response_json = {
+        "results": available_authentications,
+        "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"],
+        "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
+    }
+
+    return Response(json.dumps(response_json), status=200, mimetype="application/json")
+
+
+def authentication_callback(
+    service: str,
+    auth_method: str,
+) -> werkzeug.wrappers.Response:
+    """Authentication_callback."""
+    verify_token(request.args.get("token"), force_run=True)
+    response = request.args["response"]
+    SecretService().update_secret(
+        f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
+    )
+    return redirect(
+        f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration"
+    )
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
new file mode 100644
index 000000000..a7d3bf869
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -0,0 +1,563 @@
+"""APIs for dealing with tasks."""
+import json
+import os
+import uuid
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import TypedDict
+from typing import Union
+
+import flask.wrappers
+import jinja2
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+from flask_bpmn.models.db import db
+from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
+from SpiffWorkflow.task import TaskState
+from sqlalchemy import and_
+from sqlalchemy import asc
+from sqlalchemy import desc
+from sqlalchemy import func
+from sqlalchemy.orm import aliased
+
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.routes.process_api_blueprint import (
+    _find_principal_or_raise,
+)
+from spiffworkflow_backend.routes.process_api_blueprint import (
+    _find_process_instance_by_id_or_raise,
+)
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.file_system_service import FileSystemService
+from spiffworkflow_backend.services.process_instance_processor import (
+    ProcessInstanceProcessor,
+)
+from spiffworkflow_backend.services.process_instance_service import (
+    ProcessInstanceService,
+)
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.spec_file_service import SpecFileService
+
+
+class TaskDataSelectOption(TypedDict):
+    """TaskDataSelectOption."""
+
+    value: str
+    label: str
+
+
+class ReactJsonSchemaSelectOption(TypedDict):
+    """ReactJsonSchemaSelectOption."""
+
+    type: str
+    title: str
+    enum: list[str]
+
+
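+# Illustrative mapping between the two TypedDicts above, as performed by
+# _update_form_schema_with_task_data_as_needed below: a task-data entry
+# {"value": "opt_a", "label": "Option A"} becomes
+# {"type": "string", "enum": ["opt_a"], "title": "Option A"}.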
+# TODO: see comment for before_request
+# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
+def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
+    """Task_list_my_tasks."""
+    principal = _find_principal_or_raise()
+    human_tasks = (
+        HumanTaskModel.query.order_by(desc(HumanTaskModel.id))  # type: ignore
+        .join(ProcessInstanceModel)
+        .join(HumanTaskUserModel)
+        .filter_by(user_id=principal.user_id)
+        .filter(HumanTaskModel.completed == False)  # noqa: E712
+        # just need this add_columns to add the process_model_identifier. Then add everything back that was removed.
+        .add_columns(
+            ProcessInstanceModel.process_model_identifier,
+            ProcessInstanceModel.process_model_display_name,
+            ProcessInstanceModel.status,
+            HumanTaskModel.task_name,
+            HumanTaskModel.task_title,
+            HumanTaskModel.task_type,
+            HumanTaskModel.task_status,
+            HumanTaskModel.task_id,
+            HumanTaskModel.id,
+            HumanTaskModel.process_model_display_name,
+            HumanTaskModel.process_instance_id,
+        )
+        .paginate(page=page, per_page=per_page, error_out=False)
+    )
+    tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items]
+
+    response_json = {
+        "results": tasks,
+        "pagination": {
+            "count": len(human_tasks.items),
+            "total": human_tasks.total,
+            "pages": human_tasks.pages,
+        },
+    }
+
+    return make_response(jsonify(response_json), 200)
+
+
+def task_list_for_my_open_processes(
+    page: int = 1, per_page: int = 100
+) -> flask.wrappers.Response:
+    """Task_list_for_my_open_processes."""
+    return _get_tasks(page=page, per_page=per_page)
+
+
+def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
+    """Task_list_for_me."""
+    return _get_tasks(
+        processes_started_by_user=False,
+        has_lane_assignment_id=False,
+        page=page,
+        per_page=per_page,
+    )
+
+
+def task_list_for_my_groups(
+    user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
+) -> flask.wrappers.Response:
+    """Task_list_for_my_groups."""
+    return _get_tasks(
+        user_group_identifier=user_group_identifier,
+        processes_started_by_user=False,
+        page=page,
+        per_page=per_page,
+    )
+
+
+def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
+    """Task_show."""
+    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+
+    if process_instance.status == ProcessInstanceStatus.suspended.value:
+        raise ApiError(
+            error_code="error_suspended",
+            message="The process instance is suspended",
+            status_code=400,
+        )
+
+    process_model = _get_process_model(
+        process_instance.process_model_identifier,
+    )
+
+    human_task = HumanTaskModel.query.filter_by(
+        process_instance_id=process_instance_id, task_id=task_id
+    ).first()
+    if human_task is None:
+        raise (
+            ApiError(
+                error_code="no_human_task",
+                message=(
+                    f"Cannot find a task to complete for task id '{task_id}' and"
+                    f" process instance {process_instance_id}."
+                ),
+                status_code=500,
+            )
+        )
+
+    form_schema_file_name = ""
+    form_ui_schema_file_name = ""
+    spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance)
+    extensions = spiff_task.task_spec.extensions
+
+    if "properties" in extensions:
+        properties = extensions["properties"]
+        if "formJsonSchemaFilename" in properties:
+            form_schema_file_name = properties["formJsonSchemaFilename"]
+        if "formUiSchemaFilename" in properties:
+            form_ui_schema_file_name = properties["formUiSchemaFilename"]
+    processor = ProcessInstanceProcessor(process_instance)
+    task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
+    task.data = spiff_task.data
+    task.process_model_display_name = process_model.display_name
+    task.process_model_identifier = process_model.id
+
+    process_model_with_form = process_model
+    refs = SpecFileService.get_references_for_process(process_model_with_form)
+    all_processes = [i.identifier for i in refs]
+    if task.process_identifier not in all_processes:
+        bpmn_file_full_path = (
+            ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
+                task.process_identifier
+            )
+        )
+        relative_path = os.path.relpath(
+            bpmn_file_full_path, start=FileSystemService.root_path()
+        )
+        process_model_relative_path = os.path.dirname(relative_path)
+        process_model_with_form = (
+            ProcessModelService.get_process_model_from_relative_path(
+                process_model_relative_path
+            )
+        )
+
+    if task.type == "User Task":
+        if not form_schema_file_name:
+            raise (
+                ApiError(
+                    error_code="missing_form_file",
+                    message=(
+                        "Cannot find a form file for process_instance_id:"
+                        f" {process_instance_id}, task_id: {task_id}"
+                    ),
+                    status_code=400,
+                )
+            )
+
+        form_contents = _prepare_form_data(
+            form_schema_file_name,
+            task.data,
+            process_model_with_form,
+        )
+
+        try:
+            # form_contents is a str
+            form_dict = json.loads(form_contents)
+        except Exception as exception:
+            raise (
+                ApiError(
+                    error_code="error_loading_form",
+                    message=(
+                        f"Could not load form schema from: {form_schema_file_name}."
+                        f" Error was: {str(exception)}"
+                    ),
+                    status_code=400,
+                )
+            ) from exception
+
+        if task.data:
+            _update_form_schema_with_task_data_as_needed(form_dict, task.data)
+
+        if form_contents:
+            task.form_schema = form_dict
+
+    if form_ui_schema_file_name:
+        ui_form_contents = _prepare_form_data(
+            form_ui_schema_file_name,
+            task.data,
+            process_model_with_form,
+        )
+        if ui_form_contents:
+            task.form_ui_schema = ui_form_contents
+
+    if task.properties and task.data and "instructionsForEndUser" in task.properties:
+        if task.properties["instructionsForEndUser"]:
+            task.properties["instructionsForEndUser"] = _render_jinja_template(
+                task.properties["instructionsForEndUser"], task.data
+            )
+    return make_response(jsonify(task), 200)
+
+
+ f" Error was: {str(exception)}" + ), + status_code=400, + ) + ) from exception + + if task.data: + _update_form_schema_with_task_data_as_needed(form_dict, task.data) + + if form_contents: + task.form_schema = form_dict + + if form_ui_schema_file_name: + ui_form_contents = _prepare_form_data( + form_ui_schema_file_name, + task.data, + process_model_with_form, + ) + if ui_form_contents: + task.form_ui_schema = ui_form_contents + + if task.properties and task.data and "instructionsForEndUser" in task.properties: + if task.properties["instructionsForEndUser"]: + task.properties["instructionsForEndUser"] = _render_jinja_template( + task.properties["instructionsForEndUser"], task.data + ) + return make_response(jsonify(task), 200) + + +def process_data_show( + process_instance_id: int, + process_data_identifier: str, + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_data_show.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + processor = ProcessInstanceProcessor(process_instance) + all_process_data = processor.get_data() + process_data_value = None + if process_data_identifier in all_process_data: + process_data_value = all_process_data[process_data_identifier] + + return make_response( + jsonify( + { + "process_data_identifier": process_data_identifier, + "process_data_value": process_data_value, + } + ), + 200, + ) + + +def task_submit( + process_instance_id: int, + task_id: str, + body: Dict[str, Any], + terminate_loop: bool = False, +) -> flask.wrappers.Response: + """Task_submit_user_data.""" + principal = _find_principal_or_raise() + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + if not process_instance.can_submit_task(): + raise ApiError( + error_code="process_instance_not_runnable", + message=( + f"Process Instance ({process_instance.id}) has status " + f"{process_instance.status} which does not allow tasks to be submitted." + ), + status_code=400, + ) + + processor = ProcessInstanceProcessor(process_instance) + spiff_task = _get_spiff_task_from_process_instance( + task_id, process_instance, processor=processor + ) + AuthorizationService.assert_user_can_complete_spiff_task( + process_instance.id, spiff_task, principal.user + ) + + if spiff_task.state != TaskState.READY: + raise ( + ApiError( + error_code="invalid_state", + message="You may not update a task unless it is in the READY state.", + status_code=400, + ) + ) + + if terminate_loop and spiff_task.is_looping(): + spiff_task.terminate_loop() + + human_task = HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id, completed=False + ).first() + if human_task is None: + raise ( + ApiError( + error_code="no_human_task", + message=( + f"Cannot find a task to complete for task id '{task_id}' and" + f" process instance {process_instance_id}." + ), + status_code=500, + ) + ) + + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) + + # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same + # task spec, complete that form as well. 
+def task_submit(
+    process_instance_id: int,
+    task_id: str,
+    body: Dict[str, Any],
+    terminate_loop: bool = False,
+) -> flask.wrappers.Response:
+    """Task_submit."""
+    principal = _find_principal_or_raise()
+    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+    if not process_instance.can_submit_task():
+        raise ApiError(
+            error_code="process_instance_not_runnable",
+            message=(
+                f"Process Instance ({process_instance.id}) has status "
+                f"{process_instance.status} which does not allow tasks to be submitted."
+            ),
+            status_code=400,
+        )
+
+    processor = ProcessInstanceProcessor(process_instance)
+    spiff_task = _get_spiff_task_from_process_instance(
+        task_id, process_instance, processor=processor
+    )
+    AuthorizationService.assert_user_can_complete_spiff_task(
+        process_instance.id, spiff_task, principal.user
+    )
+
+    if spiff_task.state != TaskState.READY:
+        raise (
+            ApiError(
+                error_code="invalid_state",
+                message="You may not update a task unless it is in the READY state.",
+                status_code=400,
+            )
+        )
+
+    if terminate_loop and spiff_task.is_looping():
+        spiff_task.terminate_loop()
+
+    human_task = HumanTaskModel.query.filter_by(
+        process_instance_id=process_instance_id, task_id=task_id, completed=False
+    ).first()
+    if human_task is None:
+        raise (
+            ApiError(
+                error_code="no_human_task",
+                message=(
+                    f"Cannot find a task to complete for task id '{task_id}' and"
+                    f" process instance {process_instance_id}."
+                ),
+                status_code=500,
+            )
+        )
+
+    ProcessInstanceService.complete_form_task(
+        processor=processor,
+        spiff_task=spiff_task,
+        data=body,
+        user=g.user,
+        human_task=human_task,
+    )
+
+    # If we need to update all tasks, then get the next ready task and if it is a
+    # multi-instance with the same task spec, complete that form as well.
+    # if update_all:
+    #     last_index = spiff_task.task_info()["mi_index"]
+    #     next_task = processor.next_task()
+    #     while next_task and next_task.task_info()["mi_index"] > last_index:
+    #         __update_task(processor, next_task, form_data, user)
+    #         last_index = next_task.task_info()["mi_index"]
+    #         next_task = processor.next_task()
+
+    next_human_task_assigned_to_me = (
+        HumanTaskModel.query.filter_by(
+            process_instance_id=process_instance_id, completed=False
+        )
+        .order_by(asc(HumanTaskModel.id))  # type: ignore
+        .join(HumanTaskUserModel)
+        .filter_by(user_id=principal.user_id)
+        .first()
+    )
+    if next_human_task_assigned_to_me:
+        return make_response(
+            jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200
+        )
+
+    return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
+
+
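+# Shared implementation for the task-list endpoints above:
+# - task_list_for_my_open_processes: processes_started_by_user=True (defaults)
+# - task_list_for_me: processes_started_by_user=False, has_lane_assignment_id=False
+# - task_list_for_my_groups: processes_started_by_user=False, optionally
+#   narrowed to one group via user_group_identifier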
+def _get_tasks(
+    processes_started_by_user: bool = True,
+    has_lane_assignment_id: bool = True,
+    page: int = 1,
+    per_page: int = 100,
+    user_group_identifier: Optional[str] = None,
+) -> flask.wrappers.Response:
+    """Get_tasks."""
+    user_id = g.user.id
+
+    # use distinct to ensure we only get one row per human task otherwise
+    # we can get back multiple for the same human task row which throws off
+    # pagination later on
+    # https://stackoverflow.com/q/34582014/6090676
+    human_tasks_query = (
+        db.session.query(HumanTaskModel)
+        .group_by(HumanTaskModel.id)  # type: ignore
+        .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
+        .join(ProcessInstanceModel)
+        .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
+        .filter(HumanTaskModel.completed == False)  # noqa: E712
+    )
+
+    assigned_user = aliased(UserModel)
+    if processes_started_by_user:
+        human_tasks_query = (
+            human_tasks_query.filter(
+                ProcessInstanceModel.process_initiator_id == user_id
+            )
+            .outerjoin(
+                HumanTaskUserModel,
+                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+            )
+            .outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
+        )
+    else:
+        human_tasks_query = human_tasks_query.filter(
+            ProcessInstanceModel.process_initiator_id != user_id
+        ).join(
+            HumanTaskUserModel,
+            and_(
+                HumanTaskUserModel.user_id == user_id,
+                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+            ),
+        )
+        if has_lane_assignment_id:
+            if user_group_identifier:
+                human_tasks_query = human_tasks_query.filter(
+                    GroupModel.identifier == user_group_identifier
+                )
+            else:
+                human_tasks_query = human_tasks_query.filter(
+                    HumanTaskModel.lane_assignment_id.is_not(None)  # type: ignore
+                )
+        else:
+            human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None))  # type: ignore
+
+    human_tasks = (
+        human_tasks_query.add_columns(
+            ProcessInstanceModel.process_model_identifier,
+            ProcessInstanceModel.status.label("process_instance_status"),  # type: ignore
+            ProcessInstanceModel.updated_at_in_seconds,
+            ProcessInstanceModel.created_at_in_seconds,
+            UserModel.username.label("process_initiator_username"),  # type: ignore
+            GroupModel.identifier.label("assigned_user_group_identifier"),
+            HumanTaskModel.task_name,
+            HumanTaskModel.task_title,
+            HumanTaskModel.process_model_display_name,
+            HumanTaskModel.process_instance_id,
+            func.group_concat(assigned_user.username.distinct()).label(
+                "potential_owner_usernames"
+            ),
+        )
+        .order_by(desc(HumanTaskModel.id))  # type: ignore
+        .paginate(page=page, per_page=per_page, error_out=False)
+    )
+
+    response_json = {
+        "results": human_tasks.items,
+        "pagination": {
+            "count": len(human_tasks.items),
+            "total": human_tasks.total,
+            "pages": human_tasks.pages,
+        },
+    }
+
+    return make_response(jsonify(response_json), 200)
+
+
+def _prepare_form_data(
+    form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo
+) -> str:
+    """Prepare_form_data."""
+    if task_data is None:
+        return ""
+
+    file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
+    return _render_jinja_template(file_contents, task_data)
+
+
+def _render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str:
+    """Render_jinja_template."""
+    jinja_environment = jinja2.Environment(
+        autoescape=True, lstrip_blocks=True, trim_blocks=True
+    )
+    template = jinja_environment.from_string(unprocessed_template)
+    return template.render(**data)
+
+
+def _get_spiff_task_from_process_instance(
+    task_id: str,
+    process_instance: ProcessInstanceModel,
+    processor: Union[ProcessInstanceProcessor, None] = None,
+) -> SpiffTask:
+    """Get_spiff_task_from_process_instance."""
+    if processor is None:
+        processor = ProcessInstanceProcessor(process_instance)
+    task_uuid = uuid.UUID(task_id)
+    spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
+
+    if spiff_task is None:
+        raise (
+            ApiError(
+                error_code="empty_task",
+                message="Processor failed to obtain task.",
+                status_code=500,
+            )
+        )
+    return spiff_task
+
+
+# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
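+# Example of the substitution performed below (schema fragments illustrative):
+# a form schema containing {"anyOf": ["options_from_task_data_var:fruits"]}
+# with task data {"fruits": [{"value": "apple", "label": "Apple"}]} is rewritten
+# in place to {"anyOf": [{"type": "string", "enum": ["apple"], "title": "Apple"}]}.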
+def _update_form_schema_with_task_data_as_needed(
+    in_dict: dict, task_data: dict
+) -> None:
+    """Update_nested."""
+    for k, value in in_dict.items():
+        if "anyOf" == k:
+            # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"]
+            if isinstance(value, list):
+                if len(value) == 1:
+                    first_element_in_value_list = value[0]
+                    if isinstance(first_element_in_value_list, str):
+                        if first_element_in_value_list.startswith(
+                            "options_from_task_data_var:"
+                        ):
+                            task_data_var = first_element_in_value_list.replace(
+                                "options_from_task_data_var:", ""
+                            )
+
+                            if task_data_var not in task_data:
+                                raise (
+                                    ApiError(
+                                        error_code="missing_task_data_var",
+                                        message=(
+                                            "Task data is missing variable:"
+                                            f" {task_data_var}"
+                                        ),
+                                        status_code=500,
+                                    )
+                                )
+
+                            select_options_from_task_data = task_data.get(task_data_var)
+                            if isinstance(select_options_from_task_data, list):
+                                if all(
+                                    "value" in d and "label" in d
+                                    for d in select_options_from_task_data
+                                ):
+
+                                    def map_function(
+                                        task_data_select_option: TaskDataSelectOption,
+                                    ) -> ReactJsonSchemaSelectOption:
+                                        """Map_function."""
+                                        return {
+                                            "type": "string",
+                                            "enum": [task_data_select_option["value"]],
+                                            "title": task_data_select_option["label"],
+                                        }
+
+                                    options_for_react_json_schema_form = list(
+                                        map(map_function, select_options_from_task_data)
+                                    )
+
+                                    in_dict[k] = options_for_react_json_schema_form
+        elif isinstance(value, dict):
+            _update_form_schema_with_task_data_as_needed(value, task_data)
+        elif isinstance(value, list):
+            for o in value:
+                if isinstance(o, dict):
+                    _update_form_schema_with_task_data_as_needed(o, task_data)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
index ad98fbbc6..1ac6207c0 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
@@ -67,16 +67,19 @@ def verify_token(
             user_model = get_user_from_decoded_internal_token(decoded_token)
         except Exception as e:
             current_app.logger.error(
-                f"Exception in verify_token getting user from decoded internal token. {e}"
+                "Exception in verify_token getting user from decoded"
+                f" internal token. {e}"
             )
     elif "iss" in decoded_token.keys():
         try:
             if AuthenticationService.validate_id_token(token):
                 user_info = decoded_token
-        except ApiError as ae:  # API Error is only thrown in the token is outdated.
+        except (
+            ApiError
+        ) as ae:  # An ApiError is only thrown if the token is outdated.
             # Try to refresh the token
             user = UserService.get_user_by_service_and_service_id(
-                "open_id", decoded_token["sub"]
+                decoded_token["iss"], decoded_token["sub"]
             )
             if user:
                 refresh_token = AuthenticationService.get_refresh_token(user.id)
@@ -105,10 +108,12 @@
             ) from e
 
     if (
-        user_info is not None and "error" not in user_info
+        user_info is not None
+        and "error" not in user_info
+        and "iss" in user_info
     ):  # not sure what to test yet
         user_model = (
-            UserModel.query.filter(UserModel.service == "open_id")
+            UserModel.query.filter(UserModel.service == user_info["iss"])
            .filter(UserModel.service_id == user_info["sub"])
            .first()
        )
@@ -293,7 +298,6 @@ def get_decoded_token(token: str) -> Optional[Dict]:
     try:
         decoded_token = jwt.decode(token, options={"verify_signature": False})
     except Exception as e:
-        print(f"Exception in get_token_type: {e}")
         raise ApiError(
             error_code="invalid_token", message="Cannot decode token."
         ) from e
@@ -341,9 +345,5 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
     )
     if user:
         return user
-    user = UserModel(
-        username=service_id,
-        service=service,
-        service_id=service_id,
-    )
+    user = UserService.create_user(service_id, service, service_id)
     return user
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py
index 29bbddcd1..fd5c1ae90 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py
@@ -26,6 +26,7 @@ user_blueprint = Blueprint("main", __name__)
 #     user = UserService.create_user('internal', username)
 #     return Response(json.dumps({"id": user.id}), status=201, mimetype=APPLICATION_JSON)
 
+
 # def _create_user(username):
 #     user = UserModel.query.filter_by(username=username).first()
 #     if user is not None:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/users_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/users_controller.py
new file mode 100644
index 000000000..5dce5b43e
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/users_controller.py
@@ -0,0 +1,26 @@
+"""Users_controller."""
+import flask
+from flask import g
+from flask import jsonify
+from flask import make_response
+
+from spiffworkflow_backend.models.user import UserModel
+
+
+def user_search(username_prefix: str) -> flask.wrappers.Response:
+    """User_search."""
+    found_users = UserModel.query.filter(UserModel.username.like(f"{username_prefix}%")).all()  # type: ignore
+
+    response_json = {
+        "users": found_users,
+        "username_prefix": username_prefix,
+    }
+    return make_response(jsonify(response_json), 200)
+
+
+def user_group_list_for_current_user() -> flask.wrappers.Response:
+    """User_group_list_for_current_user."""
+    groups = g.user.groups
+    # TODO: filter out the default group and have a way to know what is the default group
+    group_identifiers = [i.identifier for i in groups if i.identifier != "everybody"]
+    return make_response(jsonify(sorted(group_identifiers)), 200)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/add_user_to_group.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/add_user_to_group.py
deleted file mode 100644
index d3c777118..000000000
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/add_user_to_group.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""Get_env."""
-from typing import Any
-
-from spiffworkflow_backend.models.group import GroupModel
-from spiffworkflow_backend.models.group import GroupNotFoundError
-from spiffworkflow_backend.models.script_attributes_context import (
-    ScriptAttributesContext,
-)
-from spiffworkflow_backend.models.user import UserModel
-from spiffworkflow_backend.models.user import UserNotFoundError
-from spiffworkflow_backend.scripts.script import Script
-from spiffworkflow_backend.services.user_service import UserService
-
-
-class AddUserToGroup(Script):
-    """AddUserToGroup."""
-
-    def get_description(self) -> str:
-        """Get_description."""
-        return """Add a given user to a given group."""
-
-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *args: Any,
-        **kwargs: Any,
-    ) -> Any:
-        """Run."""
-        username = args[0]
-        group_identifier = args[1]
-        user = UserModel.query.filter_by(username=username).first()
-        if user is None:
-            raise UserNotFoundError(
-                f"Script 'add_user_to_group' could not find a user with username: {username}"
-            )
-
-        group = GroupModel.query.filter_by(identifier=group_identifier).first()
-        if group is None:
-            raise GroupNotFoundError(
-                f"Script 'add_user_to_group' could not find group with identifier '{group_identifier}'."
-            )
-
-        UserService.add_user_to_group(user, group)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py
new file mode 100644
index 000000000..5b4225253
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py
@@ -0,0 +1,63 @@
+"""Delete_process_instances_with_criteria."""
+from time import time
+from typing import Any
+
+from flask_bpmn.models.db import db
+from sqlalchemy import or_
+
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.script_attributes_context import (
+    ScriptAttributesContext,
+)
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
+from spiffworkflow_backend.scripts.script import Script
+
+
+class DeleteProcessInstancesWithCriteria(Script):
+    """DeleteProcessInstancesWithCriteria."""
+
+    def get_description(self) -> str:
+        """Get_description."""
+        return "Delete process instances that match the provided criteria."
+
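+    # Example criteria list (key names taken from run() below; values
+    # illustrative):
+    #   [{"name": "some/model", "status": ["complete"], "last_updated_delta": 86400}]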
+    def run(
+        self,
+        script_attributes_context: ScriptAttributesContext,
+        *args: Any,
+        **kwargs: Any,
+    ) -> Any:
+        """Run."""
+        criteria_list = args[0]
+
+        delete_criteria = []
+        delete_time = time()
+
+        for criteria in criteria_list:
+            delete_criteria.append(
+                (ProcessInstanceModel.process_model_identifier == criteria["name"])
+                & ProcessInstanceModel.status.in_(criteria["status"])  # type: ignore
+                & (
+                    ProcessInstanceModel.updated_at_in_seconds
+                    < (delete_time - criteria["last_updated_delta"])
+                )
+            )
+
+        results = (
+            ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
+        )
+        rows_affected = len(results)
+
+        if rows_affected > 0:
+            ids_to_delete = list(map(lambda r: r.id, results))  # type: ignore
+
+            step_details = SpiffStepDetailsModel.query.filter(
+                SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete)  # type: ignore
+            ).all()
+
+            for deletion in step_details:
+                db.session.delete(deletion)
+            for deletion in results:
+                db.session.delete(deletion)
+            db.session.commit()
+
+        return rows_affected
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py
index ee86a84a7..c739d15aa 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py
@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
 class FactService(Script):
     """FactService."""
 
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
     def get_description(self) -> str:
         """Get_description."""
         return """Just your basic class that can pull in data from a few api endpoints and
@@ -30,7 +35,10 @@ class FactService(Script):
         if fact == "cat":
             details = "The cat in the hat"  # self.get_cat()
         elif fact == "norris":
-            details = "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants."
+            details = (
+                "Chuck Norris doesn’t read books. He stares them down until he gets the"
+                " information he wants."
+            )
         elif fact == "buzzword":
             details = "Move the Needle."  # self.get_buzzword()
         else:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py
new file mode 100644
index 000000000..e2ab07637
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py
@@ -0,0 +1,71 @@
+"""Get_all_permissions."""
+from collections import OrderedDict
+from typing import Any
+
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
+from spiffworkflow_backend.models.permission_target import PermissionTargetModel
+from spiffworkflow_backend.models.principal import PrincipalModel
+from spiffworkflow_backend.models.script_attributes_context import (
+    ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+
+
+class GetAllPermissions(Script):
+    """GetAllPermissions."""
+
+    def get_description(self) -> str:
+        """Get_description."""
+        return """Get all permissions currently in the system."""
+
+    def run(
+        self,
+        script_attributes_context: ScriptAttributesContext,
+        *args: Any,
+        **kwargs: Any,
+    ) -> Any:
+        """Run."""
+        permission_assignments = (
+            PermissionAssignmentModel.query.join(
+                PrincipalModel,
+                PrincipalModel.id == PermissionAssignmentModel.principal_id,
+            )
+            .join(GroupModel, GroupModel.id == PrincipalModel.group_id)
+            .join(
+                PermissionTargetModel,
+                PermissionTargetModel.id
+                == PermissionAssignmentModel.permission_target_id,
+            )
+            .add_columns(
+                PermissionAssignmentModel.permission,
+                PermissionTargetModel.uri,
+                GroupModel.identifier.label("group_identifier"),
+            )
+        )
+
+        permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict()
+        for pa in permission_assignments:
+            permissions.setdefault((pa.group_identifier, pa.uri), []).append(
+                pa.permission
+            )
+
+        def replace_suffix(string: str, old: str, new: str) -> str:
+            """Replace_suffix."""
+            if string.endswith(old):
+                return string[: -len(old)] + new
+            return string
+
+        # sort list of strings based on a specific order
+        def sort_by_order(string_list: list, order: list) -> list:
+            """Sort_by_order."""
+            return sorted(string_list, key=lambda x: order.index(x))
+
+        return [
+            {
+                "group_identifier": k[0],
+                "uri": replace_suffix(k[1], "%", "*"),
+                "permissions": sort_by_order(v, ["create", "read", "update", "delete"]),
+            }
+            for k, v in permissions.items()
+        ]
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py
index a1a1b47e9..66d21a4ca 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py
@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
 class GetCurrentUser(Script):
     """GetCurrentUser."""
 
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
     def get_description(self) -> str:
         """Get_description."""
         return """Return the current user."""
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py
index cd586ae00..7a6b0f44c 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py
@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
 class GetEnv(Script):
     """GetEnv."""
 
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
     def get_description(self) -> str:
         """Get_description."""
         return """Returns the current environment - ie testing, staging, production."""
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py
index 9490df95a..b128214ab 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py
@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
 class GetFrontendUrl(Script):
     """GetFrontendUrl."""
 
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
     def get_description(self) -> str:
         """Get_description."""
         return """Return the url to the frontend."""
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py
index 243a8c524..0f20fbb3c 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py
@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
 class GetGroupMembers(Script):
     """GetGroupMembers."""
 
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
     def get_description(self) -> str:
         """Get_description."""
         return """Return the list of usernames of the users in the given group."""
@@ -27,7 +32,8 @@ class GetGroupMembers(Script):
         group = GroupModel.query.filter_by(identifier=group_identifier).first()
         if group is None:
             raise GroupNotFoundError(
-                f"Script 'get_group_members' could not find group with identifier '{group_identifier}'."
+                "Script 'get_group_members' could not find group with identifier"
+                f" '{group_identifier}'."
             )
 
         usernames = [u.username for u in group.users]
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py
index 689b86d8c..7c688e56f 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py
@@ -14,6 +14,11 @@ from spiffworkflow_backend.scripts.script import Script
 class GetLocaltime(Script):
     """GetLocaltime."""
 
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
     def get_description(self) -> str:
         """Get_description."""
         return """Converts a Datetime object into a Datetime object for a specific timezone.
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py
index 45c70d6ba..99eb4ce26 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py
@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
 class GetProcessInfo(Script):
     """GetProcessInfo."""
 
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
     def get_description(self) -> str:
         """Get_description."""
         return """Returns a dictionary of information about the currently running process."""
@@ -23,5 +28,7 @@ class GetProcessInfo(Script):
         """Run."""
         return {
             "process_instance_id": script_attributes_context.process_instance_id,
-            "process_model_identifier": script_attributes_context.process_model_identifier,
+            "process_model_identifier": (
+                script_attributes_context.process_model_identifier
+            ),
         }
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/refresh_permissions.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/refresh_permissions.py
new file mode 100644
index 000000000..4981af93d
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/refresh_permissions.py
@@ -0,0 +1,39 @@
+"""Refresh_permissions."""
+from typing import Any
+
+from spiffworkflow_backend.models.script_attributes_context import (
+    ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
+
+
+class RefreshPermissions(Script):
+    """RefreshPermissions."""
+
+    def get_description(self) -> str:
+        """Get_description."""
+        return """Add permissions using a dict.
+
+        group_info: [
+            {
+                'name': group_identifier,
+                'users': array_of_users,
+                'permissions': [
+                    {
+                        'actions': array_of_actions - create, read, etc,
+                        'uri': target_uri
+                    }
+                ]
+            }
+        ]
+        """
+
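+    # Example call, following the shape documented above (group and user names
+    # illustrative):
+    #   refresh_permissions([{"name": "admin", "users": ["alice"],
+    #       "permissions": [{"actions": ["create", "read"], "uri": "/*"}]}])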
+ group_info: [ + { + 'name': group_identifier, + 'users': array_of_users, + 'permissions': [ + { + 'actions': array_of_actions - create, read, etc, + 'uri': target_uri + } + ] + } + ] + """ + + def run( + self, + script_attributes_context: ScriptAttributesContext, + *args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + group_info = args[0] + AuthorizationService.refresh_permissions(group_info) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py index b744694a2..7ca798466 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py @@ -10,9 +10,12 @@ from typing import Callable from flask_bpmn.api.api_error import ApiError +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceNotFoundError from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) +from spiffworkflow_backend.services.authorization_service import AuthorizationService # Generally speaking, having some global in a flask app is TERRIBLE. # This is here, because after loading the application this will never change under @@ -20,6 +23,10 @@ from spiffworkflow_backend.models.script_attributes_context import ( SCRIPT_SUB_CLASSES = None +class ScriptUnauthorizedForUserError(Exception): + """ScriptUnauthorizedForUserError.""" + + class Script: """Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks.""" @@ -43,6 +50,15 @@ class Script: + "does not properly implement the run function.", ) + @staticmethod + def requires_privileged_permissions() -> bool: + """It seems safer to default to True and make safe functions opt in for any user to run them. + + To give access to script for a given user, add a 'create' permission with following target-uri: + '/can-run-privileged-script/{script_name}' + """ + return True + @staticmethod def generate_augmented_list( script_attributes_context: ScriptAttributesContext, @@ -71,18 +87,52 @@ class Script: that we created. 
""" instance = subclass() - return lambda *ar, **kw: subclass.run( - instance, - script_attributes_context, - *ar, - **kw, - ) + + def check_script_permission() -> None: + """Check_script_permission.""" + if subclass.requires_privileged_permissions(): + script_function_name = get_script_function_name(subclass) + uri = f"/can-run-privileged-script/{script_function_name}" + process_instance = ProcessInstanceModel.query.filter_by( + id=script_attributes_context.process_instance_id + ).first() + if process_instance is None: + raise ProcessInstanceNotFoundError( + "Could not find a process instance with id" + f" '{script_attributes_context.process_instance_id}' when" + f" running script '{script_function_name}'" + ) + user = process_instance.process_initiator + has_permission = AuthorizationService.user_has_permission( + user=user, permission="create", target_uri=uri + ) + if not has_permission: + raise ScriptUnauthorizedForUserError( + f"User {user.username} does not have access to run" + f" privileged script '{script_function_name}'" + ) + + def run_script_if_allowed(*ar: Any, **kw: Any) -> Any: + """Run_script_if_allowed.""" + check_script_permission() + return subclass.run( + instance, + script_attributes_context, + *ar, + **kw, + ) + + return run_script_if_allowed + + def get_script_function_name(subclass: type[Script]) -> str: + """Get_script_function_name.""" + return subclass.__module__.split(".")[-1] execlist = {} subclasses = Script.get_all_subclasses() for x in range(len(subclasses)): subclass = subclasses[x] - execlist[subclass.__module__.split(".")[-1]] = make_closure( + execlist[get_script_function_name(subclass)] = make_closure( subclass, script_attributes_context=script_attributes_context ) return execlist @@ -101,7 +151,7 @@ class Script: """_get_all_subclasses.""" # hackish mess to make sure we have all the modules loaded for the scripts pkg_dir = os.path.dirname(__file__) - for (_module_loader, name, _ispkg) in pkgutil.iter_modules([pkg_dir]): + for _module_loader, name, _ispkg in pkgutil.iter_modules([pkg_dir]): importlib.import_module("." 
+ name, __package__) """Returns a list of all classes that extend this class.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py index 81488910e..6bbcad331 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py @@ -29,7 +29,6 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]: # suspended - 6 hours ago process_instances = [] for i in range(len(statuses)): - process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( test_process_model_id, user ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index 95c1eaa89..fd2bdb898 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -93,7 +93,7 @@ class AuthenticationService: + f"?state={state}&" + "response_type=code&" + f"client_id={self.client_id()}&" - + "scope=openid&" + + "scope=openid profile email&" + f"redirect_uri={return_redirect_url}" ) return login_redirect_url diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index a2f41ac10..69d19cb7f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -1,10 +1,14 @@ """Authorization_service.""" import inspect import re +from dataclasses import dataclass from hashlib import sha256 from hmac import compare_digest from hmac import HMAC +from typing import Any from typing import Optional +from typing import Set +from typing import TypedDict from typing import Union import jwt @@ -19,6 +23,7 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from sqlalchemy import or_ from sqlalchemy import text +from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel @@ -45,6 +50,40 @@ class UserDoesNotHaveAccessToTaskError(Exception): """UserDoesNotHaveAccessToTaskError.""" +class InvalidPermissionError(Exception): + """InvalidPermissionError.""" + + +@dataclass +class PermissionToAssign: + """PermissionToAssign.""" + + permission: str + target_uri: str + + +# the relevant permissions are the only API methods that are currently available for each path prefix. +# if we add further API methods, we'll need to evaluate whether they should be added here. 
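+# for example, granting a group the permission macro "all" on the target macro
+# "PG:some-group" (a hypothetical process group) expands the "/logs" entry
+# below into a single "read" permission on "/logs/some-group:*".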
+PATH_SEGMENTS_FOR_PERMISSION_ALL = [
+    {"path": "/logs", "relevant_permissions": ["read"]},
+    {
+        "path": "/process-instances",
+        "relevant_permissions": ["create", "read", "delete"],
+    },
+    {"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
+    {"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
+    {"path": "/task-data", "relevant_permissions": ["read", "update"]},
+    {"path": "/process-data", "relevant_permissions": ["read"]},
+]
+
+
+class DesiredPermissionDict(TypedDict):
+    """DesiredPermissionDict."""
+
+    group_identifiers: Set[str]
+    permission_assignments: list[PermissionAssignmentModel]
+
+
 class AuthorizationService:
     """Determine whether a user has permission to perform their request."""
 
@@ -75,6 +114,7 @@ class AuthorizationService:
     ) -> bool:
         """Has_permission."""
         principal_ids = [p.id for p in principals]
+        target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)
 
         permission_assignments = (
             PermissionAssignmentModel.query.filter(
@@ -84,10 +124,13 @@ class AuthorizationService:
             .join(PermissionTargetModel)
             .filter(
                 or_(
-                    text(f"'{target_uri}' LIKE permission_target.uri"),
+                    text(f"'{target_uri_normalized}' LIKE permission_target.uri"),
                     # to check for exact matches as well
                     # see test_user_can_access_base_path_when_given_wildcard_permission unit test
-                    text(f"'{target_uri}' = replace(permission_target.uri, '/%', '')"),
+                    text(
+                        f"'{target_uri_normalized}' ="
+                        " replace(replace(permission_target.uri, '/%', ''), ':%', '')"
+                    ),
                 )
             )
             .all()
@@ -127,17 +170,15 @@ class AuthorizationService:
         return cls.has_permission(principals, permission, target_uri)
 
     @classmethod
-    def delete_all_permissions_and_recreate(cls) -> None:
-        """Delete_all_permissions_and_recreate."""
+    def delete_all_permissions(cls) -> None:
+        """Delete all permission assignments, permission targets, and groups.
+
+        Unlike the delete_all_permissions_and_recreate method this replaces, it
+        does not commit or re-import the permissions YAML file afterwards."""
         for model in [PermissionAssignmentModel, PermissionTargetModel]:
             db.session.query(model).delete()
 
         # cascading to principals doesn't seem to work when attempting to delete all so do it like this instead
         for group in GroupModel.query.all():
             db.session.delete(group)
-        db.session.commit()
-        cls.import_permissions_from_yaml_file()
 
     @classmethod
     def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None:
@@ -155,12 +196,13 @@ class AuthorizationService:
     @classmethod
     def import_permissions_from_yaml_file(
         cls, raise_if_missing_user: bool = False
-    ) -> None:
+    ) -> DesiredPermissionDict:
         """Import_permissions_from_yaml_file."""
         if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
             raise (
                 PermissionsFileNotSetError(
-                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions"
+                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in"
+                    " order to import permissions"
                 )
             )
 
@@ -169,13 +211,16 @@ class AuthorizationService:
             permission_configs = yaml.safe_load(file)
 
         default_group = None
+        unique_user_group_identifiers: Set[str] = set()
         if "default_group" in permission_configs:
             default_group_identifier = permission_configs["default_group"]
             default_group = GroupService.find_or_create_group(default_group_identifier)
+            unique_user_group_identifiers.add(default_group_identifier)
 
         if "groups" in permission_configs:
             for group_identifier, group_config in permission_configs["groups"].items():
                 group = GroupService.find_or_create_group(group_identifier)
+                unique_user_group_identifiers.add(group_identifier)
                 for username in group_config["users"]:
                     user = UserModel.query.filter_by(username=username).first()
                     if user is None:
@@ -188,26 +233,25 @@ class AuthorizationService:
                         continue
                     cls.associate_user_with_group(user, group)
 
+        permission_assignments = []
         if "permissions" in permission_configs:
             for _permission_identifier, permission_config in permission_configs[
                 "permissions"
             ].items():
                 uri = permission_config["uri"]
-                uri_with_percent = re.sub(r"\*", "%", uri)
-                permission_target = PermissionTargetModel.query.filter_by(
-                    uri=uri_with_percent
-                ).first()
-                if permission_target is None:
-                    permission_target = PermissionTargetModel(uri=uri_with_percent)
-                    db.session.add(permission_target)
-                    db.session.commit()
+                permission_target = cls.find_or_create_permission_target(uri)
 
                 for allowed_permission in permission_config["allowed_permissions"]:
                     if "groups" in permission_config:
                         for group_identifier in permission_config["groups"]:
                             group = GroupService.find_or_create_group(group_identifier)
-                            cls.create_permission_for_principal(
-                                group.principal, permission_target, allowed_permission
+                            unique_user_group_identifiers.add(group_identifier)
+                            permission_assignments.append(
+                                cls.create_permission_for_principal(
+                                    group.principal,
+                                    permission_target,
+                                    allowed_permission,
+                                )
                             )
                     if "users" in permission_config:
                         for username in permission_config["users"]:
@@ -218,14 +262,35 @@ class AuthorizationService:
                                 .filter(UserModel.username == username)
                                 .first()
                             )
-                            cls.create_permission_for_principal(
-                                principal, permission_target, allowed_permission
+                            permission_assignments.append(
+                                cls.create_permission_for_principal(
+                                    principal, permission_target, allowed_permission
+                                )
                             )
 
         if default_group is not None:
             for user in UserModel.query.all():
                 cls.associate_user_with_group(user, default_group)
 
+        return {
+            "group_identifiers": unique_user_group_identifiers,
+            "permission_assignments": permission_assignments,
+        }
+
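+    # A sketch of the YAML shape import_permissions_from_yaml_file consumes
+    # (the group and permission names here are hypothetical):
+    #
+    #   default_group: everybody
+    #   groups:
+    #     admins:
+    #       users: [alice]
+    #   permissions:
+    #     admins-all:
+    #       groups: [admins]
+    #       allowed_permissions: [create, read, update, delete]
+    #       uri: /*
+    #
+    # The DesiredPermissionDict it returns is what refresh_permissions later
+    # diffs against the permission assignments already in the database.
+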
+    @classmethod
+    def find_or_create_permission_target(cls, uri: str) -> PermissionTargetModel:
+        """Find_or_create_permission_target."""
+        uri_with_percent = re.sub(r"\*", "%", uri)
+        target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
+        permission_target: Optional[PermissionTargetModel] = (
+            PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
+        )
+        if permission_target is None:
+            permission_target = PermissionTargetModel(uri=target_uri_normalized)
+            db.session.add(permission_target)
+            db.session.commit()
+        return permission_target
+
     @classmethod
     def create_permission_for_principal(
         cls,
@@ -234,13 +299,13 @@ class AuthorizationService:
         permission: str,
     ) -> PermissionAssignmentModel:
         """Create_permission_for_principal."""
-        permission_assignment: Optional[
-            PermissionAssignmentModel
-        ] = PermissionAssignmentModel.query.filter_by(
-            principal_id=principal.id,
-            permission_target_id=permission_target.id,
-            permission=permission,
-        ).first()
+        permission_assignment: Optional[PermissionAssignmentModel] = (
+            PermissionAssignmentModel.query.filter_by(
+                principal_id=principal.id,
+                permission_target_id=permission_target.id,
+                permission=permission,
+            ).first()
+        )
         if permission_assignment is None:
             permission_assignment = PermissionAssignmentModel(
                 principal_id=principal.id,
@@ -340,7 +405,10 @@ class AuthorizationService:
 
         raise ApiError(
             error_code="unauthorized",
-            message=f"User {g.user.username} is not authorized to perform requested action: {permission_string} - {request.path}",
+            message=(
+                f"User {g.user.username} is not authorized to perform requested action:"
+                f" {permission_string} - {request.path}"
+            ),
             status_code=403,
         )
 
@@ -419,7 +487,10 @@ class AuthorizationService:
         except jwt.InvalidTokenError as exception:
             raise ApiError(
                 "token_invalid",
-                "The Authentication token you provided is invalid. You need a new token. ",
+                (
+                    "The Authentication token you provided is invalid. You need a new"
+                    " token. "
+                ),
             ) from exception
 
     @staticmethod
@@ -441,41 +512,57 @@ class AuthorizationService:
 
         if user not in human_task.potential_owners:
             raise UserDoesNotHaveAccessToTaskError(
-                f"User {user.username} does not have access to update task'{spiff_task.task_spec.name}'"
-                f" for process instance '{process_instance_id}'"
+                f"User {user.username} does not have access to update"
+                f" task '{spiff_task.task_spec.name}' for process instance"
+                f" '{process_instance_id}'"
             )
         return True
 
     @classmethod
     def create_user_from_sign_in(cls, user_info: dict) -> UserModel:
         """Create_user_from_sign_in."""
+        # Per the OpenID Connect spec, the standard profile claims are: name,
+        # family_name, given_name, middle_name, nickname, preferred_username,
+        # profile, picture, website, gender, birthdate, zoneinfo, locale,
+        # updated_at, and email.
         is_new_user = False
         user_model = (
-            UserModel.query.filter(UserModel.service == "open_id")
+            UserModel.query.filter(UserModel.service == user_info["iss"])
             .filter(UserModel.service_id == user_info["sub"])
             .first()
         )
+        email = display_name = username = ""
+        if "email" in user_info:
+            username = user_info["email"]
+            email = user_info["email"]
+        else:  # we fall back to the sub, which may be very ugly.
+            username = user_info["sub"] + "@" + user_info["iss"]
+
+        if "preferred_username" in user_info:
+            display_name = user_info["preferred_username"]
+        elif "nickname" in user_info:
+            display_name = user_info["nickname"]
+        elif "name" in user_info:
+            display_name = user_info["name"]
         if user_model is None:
             current_app.logger.debug("create_user in login_return")
             is_new_user = True
-            name = username = email = ""
-            if "name" in user_info:
-                name = user_info["name"]
-            if "username" in user_info:
-                username = user_info["username"]
-            elif "preferred_username" in user_info:
-                username = user_info["preferred_username"]
-            if "email" in user_info:
-                email = user_info["email"]
             user_model = UserService().create_user(
-                service="open_id",
-                service_id=user_info["sub"],
-                name=name,
                 username=username,
+                service=user_info["iss"],
+                service_id=user_info["sub"],
                 email=email,
+                display_name=display_name,
             )
+        else:
+            # Update with the latest information
+            user_model.username = username
+            user_model.email = email
+            user_model.display_name = display_name
+            user_model.service = user_info["iss"]
+            user_model.service_id = user_info["sub"]
+
         # this may eventually get too slow.
         # when it does, be careful about backgrounding, because
         # the user will immediately need permissions to use the site.
@@ -490,6 +577,224 @@ class AuthorizationService:
         # this cannot be None so ignore mypy
         return user_model  # type: ignore
 
+    @classmethod
+    def get_permissions_to_assign(
+        cls,
+        permission_set: str,
+        process_related_path_segment: str,
+        target_uris: list[str],
+    ) -> list[PermissionToAssign]:
+        """Get_permissions_to_assign."""
+        permissions = permission_set.split(",")
+        if permission_set == "all":
+            permissions = ["create", "read", "update", "delete"]
+
+        permissions_to_assign: list[PermissionToAssign] = []
+
+        # we were thinking that if you can start an instance, you ought to be able to view your own instances.
+        if permission_set == "start":
+            target_uri = f"/process-instances/{process_related_path_segment}"
+            permissions_to_assign.append(
+                PermissionToAssign(permission="create", target_uri=target_uri)
+            )
+            target_uri = f"/process-instances/for-me/{process_related_path_segment}"
+            permissions_to_assign.append(
+                PermissionToAssign(permission="read", target_uri=target_uri)
+            )
+
+        else:
+            if permission_set == "all":
+                for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL:
+                    target_uri = (
+                        f"{path_segment_dict['path']}/{process_related_path_segment}"
+                    )
+                    relevant_permissions = path_segment_dict["relevant_permissions"]
+                    for permission in relevant_permissions:
+                        permissions_to_assign.append(
+                            PermissionToAssign(
+                                permission=permission, target_uri=target_uri
+                            )
+                        )
+
+            for target_uri in target_uris:
+                for permission in permissions:
+                    permissions_to_assign.append(
+                        PermissionToAssign(permission=permission, target_uri=target_uri)
+                    )
+
+        return permissions_to_assign
+
+    @classmethod
+    def explode_permissions(
+        cls, permission_set: str, target: str
+    ) -> list[PermissionToAssign]:
+        """Explode the given permission set into a list of PermissionToAssign objects.
+
+        These can then be iterated through and inserted into the database.
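+
+        For example, explode_permissions("start", "PG:hr") (where "hr" is a
+        hypothetical process group identifier) expands to a "create" permission
+        on "/process-instances/hr:*" and a "read" permission on
+        "/process-instances/for-me/hr:*".
+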
+        Target Macros:
+            ALL
+                * gives access to ALL api endpoints - useful to give admin-like permissions
+            PG:[process_group_identifier]
+                * affects given process-group and all sub process-groups and process-models
+            PM:[process_model_identifier]
+                * affects given process-model
+            BASIC
+                * Basic access to complete tasks and use the site
+
+        Permission Macros:
+            all
+                * create, read, update, delete
+            start
+                * create process-instances (aka instantiate or start a process-model)
+                * only works with PG and PM target macros
+        """
+        permissions_to_assign: list[PermissionToAssign] = []
+        permissions = permission_set.split(",")
+        if permission_set == "all":
+            permissions = ["create", "read", "update", "delete"]
+
+        if target.startswith("PG:"):
+            process_group_identifier = (
+                target.removeprefix("PG:").replace("/", ":").removeprefix(":")
+            )
+            process_related_path_segment = f"{process_group_identifier}:*"
+            if process_group_identifier == "ALL":
+                process_related_path_segment = "*"
+            target_uris = [
+                f"/process-groups/{process_related_path_segment}",
+                f"/process-models/{process_related_path_segment}",
+            ]
+            permissions_to_assign = (
+                permissions_to_assign
+                + cls.get_permissions_to_assign(
+                    permission_set, process_related_path_segment, target_uris
+                )
+            )
+
+        elif target.startswith("PM:"):
+            process_model_identifier = (
+                target.removeprefix("PM:").replace("/", ":").removeprefix(":")
+            )
+            process_related_path_segment = f"{process_model_identifier}/*"
+
+            if process_model_identifier == "ALL":
+                process_related_path_segment = "*"
+
+            target_uris = [f"/process-models/{process_related_path_segment}"]
+            permissions_to_assign = (
+                permissions_to_assign
+                + cls.get_permissions_to_assign(
+                    permission_set, process_related_path_segment, target_uris
+                )
+            )
+
+        elif permission_set == "start":
+            raise InvalidPermissionError(
+                "Permission 'start' is only available for macros PM and PG."
+            )
+
+        elif target.startswith("BASIC"):
+            permissions_to_assign.append(
+                PermissionToAssign(
+                    permission="read", target_uri="/process-instances/for-me"
+                )
+            )
+            permissions_to_assign.append(
+                PermissionToAssign(permission="read", target_uri="/processes")
+            )
+            permissions_to_assign.append(
+                PermissionToAssign(permission="read", target_uri="/service-tasks")
+            )
+            permissions_to_assign.append(
+                PermissionToAssign(
+                    permission="read", target_uri="/user-groups/for-current-user"
+                )
+            )
+
+            for permission in ["create", "read", "update", "delete"]:
+                permissions_to_assign.append(
+                    PermissionToAssign(
+                        permission=permission, target_uri="/process-instances/reports/*"
+                    )
+                )
+                permissions_to_assign.append(
+                    PermissionToAssign(permission=permission, target_uri="/tasks/*")
+                )
+        elif target == "ALL":
+            for permission in permissions:
+                permissions_to_assign.append(
+                    PermissionToAssign(permission=permission, target_uri="/*")
+                )
+        elif target.startswith("/"):
+            for permission in permissions:
+                permissions_to_assign.append(
+                    PermissionToAssign(permission=permission, target_uri=target)
+                )
+        else:
+            raise InvalidPermissionError(
+                f"Target uri '{target}' with permission set '{permission_set}' is"
+                " invalid. The target uri must either be a macro of PG, PM, BASIC, or"
+                " ALL or an api uri."
+            )
+
+        return permissions_to_assign
+
+    @classmethod
+    def add_permission_from_uri_or_macro(
+        cls, group_identifier: str, permission: str, target: str
+    ) -> list[PermissionAssignmentModel]:
+        """Add_permission_from_uri_or_macro."""
+        group = GroupService.find_or_create_group(group_identifier)
+        permissions_to_assign = cls.explode_permissions(permission, target)
+        permission_assignments = []
+        for permission_to_assign in permissions_to_assign:
+            permission_target = cls.find_or_create_permission_target(
+                permission_to_assign.target_uri
+            )
+            permission_assignments.append(
+                cls.create_permission_for_principal(
+                    group.principal, permission_target, permission_to_assign.permission
+                )
+            )
+        return permission_assignments
+
+    @classmethod
+    def refresh_permissions(cls, group_info: list[dict[str, Any]]) -> None:
+        """Adds new permission assignments and deletes old ones."""
+        initial_permission_assignments = PermissionAssignmentModel.query.all()
+        result = cls.import_permissions_from_yaml_file()
+        desired_permission_assignments = result["permission_assignments"]
+        desired_group_identifiers = result["group_identifiers"]
+
+        for group in group_info:
+            group_identifier = group["name"]
+            for username in group["users"]:
+                GroupService.add_user_to_group_or_add_to_waiting(
+                    username, group_identifier
+                )
+            desired_group_identifiers.add(group_identifier)
+            for permission in group["permissions"]:
+                for crud_op in permission["actions"]:
+                    desired_permission_assignments.extend(
+                        cls.add_permission_from_uri_or_macro(
+                            group_identifier=group_identifier,
+                            target=permission["uri"],
+                            permission=crud_op,
+                        )
+                    )
+            desired_group_identifiers.add(group_identifier)
+
+        for ipa in initial_permission_assignments:
+            if ipa not in desired_permission_assignments:
+                db.session.delete(ipa)
+
+        groups_to_delete = GroupModel.query.filter(
+            GroupModel.identifier.not_in(desired_group_identifiers)
+        ).all()
+        for gtd in groups_to_delete:
+            db.session.delete(gtd)
+        db.session.commit()
+
 
 class KeycloakAuthorization:
     """Interface with Keycloak server."""
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py
index a2a9181d4..cb8b44c6d 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py
@@ -40,10 +40,9 @@ class FileSystemService:
     @staticmethod
     def root_path() -> str:
         """Root_path."""
-        # fixme: allow absolute files
         dir_name = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
-        app_root = current_app.root_path
-        return os.path.abspath(os.path.join(app_root, "..", dir_name))
+        # ensure this is a string - thanks mypy...
+ return os.path.abspath(os.path.join(dir_name, "")) @staticmethod def id_string_to_relative_path(id_string: str) -> str: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py index 495603cf0..43c18edc6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py @@ -173,13 +173,15 @@ class GitService: if "repository" not in webhook or "clone_url" not in webhook["repository"]: raise InvalidGitWebhookBodyError( - f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}" + "Cannot find required keys of 'repository:clone_url' from webhook" + f" body: {webhook}" ) clone_url = webhook["repository"]["clone_url"] if clone_url != current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"]: raise GitCloneUrlMismatchError( - f"Configured clone url does not match clone url from webhook: {clone_url}" + "Configured clone url does not match clone url from webhook:" + f" {clone_url}" ) if "ref" not in webhook: @@ -189,8 +191,8 @@ class GitService: if current_app.config["GIT_BRANCH"] is None: raise MissingGitConfigsError( - "Missing config for GIT_BRANCH. " - "This is required for updating the repository as a result of the webhook" + "Missing config for GIT_BRANCH. This is required for updating the" + " repository as a result of the webhook" ) ref = webhook["ref"] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py index aa560009e..911d41ac4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py @@ -4,6 +4,7 @@ from typing import Optional from flask_bpmn.models.db import db from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.user_service import UserService @@ -22,3 +23,15 @@ class GroupService: db.session.commit() UserService.create_principal(group.id, id_column_name="group_id") return group + + @classmethod + def add_user_to_group_or_add_to_waiting( + cls, username: str, group_identifier: str + ) -> None: + """Add_user_to_group_or_add_to_waiting.""" + group = cls.find_or_create_group(group_identifier) + user = UserModel.query.filter_by(username=username).first() + if user: + UserService.add_user_to_group(user, group) + else: + UserService.add_waiting_group_assignment(username, group) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index dd34cb3fd..599d5228d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -122,7 +122,8 @@ def setup_logger(app: Flask) -> None: if upper_log_level_string not in log_levels: raise InvalidLogLevelError( - f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}" + f"Log level given is invalid: '{upper_log_level_string}'. 
Valid options are" + f" {log_levels}" ) log_level = getattr(logging, upper_log_level_string) @@ -176,7 +177,8 @@ def setup_logger(app: Flask) -> None: spiff_logger = logging.getLogger("spiff") spiff_logger.setLevel(spiff_log_level) spiff_formatter = logging.Formatter( - "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s | %(process_instance_id)s" + "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s |" + " %(process)s | %(processName)s | %(process_instance_id)s" ) # if you add a handler to spiff, it will be used/inherited by spiff.metrics diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py index cfb42c836..b3d1e831f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py @@ -145,8 +145,11 @@ class MessageService: if process_instance_receive is None: raise MessageServiceError( ( - f"Process instance cannot be found for queued message: {message_instance_receive.id}." - f"Tried with id {message_instance_receive.process_instance_id}", + ( + "Process instance cannot be found for queued message:" + f" {message_instance_receive.id}.Tried with id" + f" {message_instance_receive.process_instance_id}" + ), ) ) @@ -182,7 +185,6 @@ class MessageService: ) for message_instance_receive in message_instances_receive: - # sqlalchemy supports select / where statements like active record apparantly # https://docs.sqlalchemy.org/en/14/core/tutorial.html#conjunctions message_correlation_select = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 24dbd497b..4a4f99a47 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -17,6 +17,7 @@ from typing import Optional from typing import Tuple from typing import TypedDict from typing import Union +from uuid import UUID import dateparser import pytz @@ -43,6 +44,9 @@ from SpiffWorkflow.spiff.serializer.task_spec_converters import ( CallActivityTaskConverter, ) from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter +from SpiffWorkflow.spiff.serializer.task_spec_converters import ( + EventBasedGatewayConverter, +) from SpiffWorkflow.spiff.serializer.task_spec_converters import ( IntermediateCatchEventConverter, ) @@ -151,6 +155,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore "time": time, "decimal": decimal, "_strptime": _strptime, + "enumerate": enumerate, + "list": list, + "map": map, } # This will overwrite the standard builtins @@ -209,14 +216,14 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore except Exception as exception: if task is None: raise ProcessInstanceProcessorError( - "Error evaluating expression: " - "'%s', exception: %s" % (expression, str(exception)), + "Error evaluating expression: '%s', exception: %s" + % (expression, str(exception)), ) from exception else: raise WorkflowTaskExecException( task, - "Error evaluating expression " - "'%s', %s" % (expression, str(exception)), + "Error evaluating expression '%s', %s" + % (expression, str(exception)), ) from exception def execute( @@ -263,6 +270,7 @@ class 
ProcessInstanceProcessor: EndEventConverter, IntermediateCatchEventConverter, IntermediateThrowEventConverter, + EventBasedGatewayConverter, ManualTaskConverter, NoneTaskConverter, ReceiveTaskConverter, @@ -276,6 +284,7 @@ class ProcessInstanceProcessor: ] ) _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION) + _event_serializer = EventBasedGatewayConverter() PROCESS_INSTANCE_ID_KEY = "process_instance_id" VALIDATION_PROCESS_KEY = "validate_only" @@ -292,9 +301,7 @@ class ProcessInstanceProcessor: tld.spiff_step = process_instance_model.spiff_step # we want this to be the fully qualified path to the process model including all group subcomponents - current_app.config[ - "THREAD_LOCAL_DATA" - ].process_model_identifier = ( + current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = ( f"{process_instance_model.process_model_identifier}" ) @@ -375,8 +382,10 @@ class ProcessInstanceProcessor: except MissingSpecError as ke: raise ApiError( error_code="unexpected_process_instance_structure", - message="Failed to deserialize process_instance" - " '%s' due to a mis-placed or missing task '%s'" + message=( + "Failed to deserialize process_instance" + " '%s' due to a mis-placed or missing task '%s'" + ) % (self.process_model_identifier, str(ke)), ) from ke @@ -392,7 +401,10 @@ class ProcessInstanceProcessor: raise ( ApiError( "process_model_not_found", - f"The given process model was not found: {process_model_identifier}.", + ( + "The given process model was not found:" + f" {process_model_identifier}." + ), ) ) spec_files = SpecFileService.get_files(process_model_info) @@ -522,8 +534,11 @@ class ProcessInstanceProcessor: potential_owner_ids.append(lane_owner_user.id) self.raise_if_no_potential_owners( potential_owner_ids, - f"No users found in task data lane owner list for lane: {task_lane}. " - f"The user list used: {task.data['lane_owners'][task_lane]}", + ( + "No users found in task data lane owner list for lane:" + f" {task_lane}. The user list used:" + f" {task.data['lane_owners'][task_lane]}" + ), ) else: group_model = GroupModel.query.filter_by(identifier=task_lane).first() @@ -573,12 +588,6 @@ class ProcessInstanceProcessor: ) return details_model - def save_spiff_step_details(self) -> None: - """SaveSpiffStepDetails.""" - details_model = self.spiff_step_details() - db.session.add(details_model) - db.session.commit() - def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: """Extract_metadata.""" metadata_extraction_paths = process_model_info.metadata_extraction_paths @@ -614,7 +623,7 @@ class ProcessInstanceProcessor: db.session.add(pim) db.session.commit() - def save(self) -> None: + def _save(self) -> None: """Saves the current state of this processor to the database.""" self.process_instance_model.bpmn_json = self.serialize() @@ -636,6 +645,9 @@ class ProcessInstanceProcessor: db.session.add(self.process_instance_model) db.session.commit() + def save(self) -> None: + """Saves the current state and moves on to the next state.""" + self._save() human_tasks = HumanTaskModel.query.filter_by( process_instance_id=self.process_instance_model.id ).all() @@ -704,6 +716,47 @@ class ProcessInstanceProcessor: db.session.add(at) db.session.commit() + def serialize_task_spec(self, task_spec: SpiffTask) -> Any: + """Get a serialized version of a task spec.""" + # The task spec is NOT actually a SpiffTask, it is the task spec attached to a SpiffTask + # Not sure why mypy accepts this but whatever. 
+ return self._serializer.spec_converter.convert(task_spec) + + def send_bpmn_event(self, event_data: dict[str, Any]) -> None: + """Send an event to the workflow.""" + payload = event_data.pop("payload", None) + event_definition = self._event_serializer.restore(event_data) + if payload is not None: + event_definition.payload = payload + current_app.logger.info( + f"Event of type {event_definition.event_type} sent to process instance" + f" {self.process_instance_model.id}" + ) + self.bpmn_process_instance.catch(event_definition) + self.do_engine_steps(save=True) + + def manual_complete_task(self, task_id: str, execute: bool) -> None: + """Mark the task complete optionally executing it.""" + spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) + if execute: + current_app.logger.info( + f"Manually executing Task {spiff_task.task_spec.name} of process" + f" instance {self.process_instance_model.id}" + ) + spiff_task.complete() + else: + current_app.logger.info( + f"Skipping Task {spiff_task.task_spec.name} of process instance" + f" {self.process_instance_model.id}" + ) + spiff_task._set_state(TaskState.COMPLETED) + for child in spiff_task.children: + child.task_spec._update(child) + self.bpmn_process_instance.last_task = spiff_task + self._save() + # Saving the workflow seems to reset the status + self.suspend() + @staticmethod def get_parser() -> MyCustomParser: """Get_parser.""" @@ -738,14 +791,13 @@ class ProcessInstanceProcessor: """Bpmn_file_full_path_from_bpmn_process_identifier.""" if bpmn_process_identifier is None: raise ValueError( - "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None" + "bpmn_file_full_path_from_bpmn_process_identifier:" + " bpmn_process_identifier is unexpectedly None" ) - spec_reference = ( - SpecReferenceCache.query.filter_by(identifier=bpmn_process_identifier) - .filter_by(type="process") - .first() - ) + spec_reference = SpecReferenceCache.query.filter_by( + identifier=bpmn_process_identifier, type="process" + ).first() bpmn_file_full_path = None if spec_reference is None: bpmn_file_full_path = ( @@ -762,7 +814,10 @@ class ProcessInstanceProcessor: raise ( ApiError( error_code="could_not_find_bpmn_process_identifier", - message="Could not find the the given bpmn process identifier from any sources: %s" + message=( + "Could not find the the given bpmn process identifier from any" + " sources: %s" + ) % bpmn_process_identifier, ) ) @@ -786,7 +841,6 @@ class ProcessInstanceProcessor: new_bpmn_files = set() for bpmn_process_identifier in processor_dependencies_new: - # ignore identifiers that spiff already knows about if bpmn_process_identifier in bpmn_process_identifiers_in_parser: continue @@ -829,7 +883,10 @@ class ProcessInstanceProcessor: raise ( ApiError( error_code="no_primary_bpmn_error", - message="There is no primary BPMN process id defined for process_model %s" + message=( + "There is no primary BPMN process id defined for" + " process_model %s" + ) % process_model_info.id, ) ) @@ -890,7 +947,10 @@ class ProcessInstanceProcessor: if not bpmn_message.correlations: raise ApiError( "message_correlations_missing", - f"Could not find any message correlations bpmn_message: {bpmn_message.name}", + ( + "Could not find any message correlations bpmn_message:" + f" {bpmn_message.name}" + ), ) message_correlations = [] @@ -910,12 +970,16 @@ class ProcessInstanceProcessor: if message_correlation_property is None: raise ApiError( "message_correlations_missing_from_process", - "Could not find a known message 
correlation with identifier:" - f"{message_correlation_property_identifier}", + ( + "Could not find a known message correlation with" + f" identifier:{message_correlation_property_identifier}" + ), ) message_correlations.append( { - "message_correlation_property": message_correlation_property, + "message_correlation_property": ( + message_correlation_property + ), "name": message_correlation_key, "value": message_correlation_property_value, } @@ -972,7 +1036,10 @@ class ProcessInstanceProcessor: if message_model is None: raise ApiError( "invalid_message_name", - f"Invalid message name: {waiting_task.task_spec.event_definition.name}.", + ( + "Invalid message name:" + f" {waiting_task.task_spec.event_definition.name}." + ), ) # Ensure we are only creating one message instance for each waiting message @@ -1186,9 +1253,13 @@ class ProcessInstanceProcessor: self.increment_spiff_step() self.bpmn_process_instance.complete_task_from_id(task.id) human_task.completed_by_user_id = user.id + human_task.completed = True db.session.add(human_task) - db.session.commit() - self.save_spiff_step_details() + details_model = self.spiff_step_details() + db.session.add(details_model) + + # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) + self.save() def get_data(self) -> dict[str, Any]: """Get_data.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 82a35fc5c..cd20b9b57 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -1,6 +1,7 @@ """Process_instance_report_service.""" import re from dataclasses import dataclass +from typing import Any from typing import Optional import sqlalchemy @@ -84,29 +85,8 @@ class ProcessInstanceReportService: """ProcessInstanceReportService.""" @classmethod - def report_with_identifier( - cls, - user: UserModel, - report_id: Optional[int] = None, - report_identifier: Optional[str] = None, - ) -> ProcessInstanceReportModel: - """Report_with_filter.""" - if report_id is not None: - process_instance_report = ProcessInstanceReportModel.query.filter_by( - id=report_id, created_by_id=user.id - ).first() - if process_instance_report is not None: - return process_instance_report # type: ignore - - if report_identifier is None: - report_identifier = "default" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, created_by_id=user.id - ).first() - - if process_instance_report is not None: - return process_instance_report # type: ignore - + def system_metadata_map(cls, metadata_key: str) -> dict[str, Any]: + """System_metadata_map.""" # TODO replace with system reports that are loaded on launch (or similar) temp_system_metadata_map = { "default": { @@ -151,10 +131,36 @@ class ProcessInstanceReportService: "order_by": ["-start_in_seconds", "-id"], }, } + return temp_system_metadata_map[metadata_key] + + @classmethod + def report_with_identifier( + cls, + user: UserModel, + report_id: Optional[int] = None, + report_identifier: Optional[str] = None, + ) -> ProcessInstanceReportModel: + """Report_with_filter.""" + if report_id is not None: + process_instance_report = ProcessInstanceReportModel.query.filter_by( + id=report_id, created_by_id=user.id + ).first() + if 
process_instance_report is not None: + return process_instance_report # type: ignore + + if report_identifier is None: + report_identifier = "default" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, created_by_id=user.id + ).first() + + if process_instance_report is not None: + return process_instance_report # type: ignore + process_instance_report = ProcessInstanceReportModel( identifier=report_identifier, created_by_id=user.id, - report_metadata=temp_system_metadata_map[report_identifier], + report_metadata=cls.system_metadata_map(report_identifier), ) return process_instance_report # type: ignore @@ -283,9 +289,9 @@ class ProcessInstanceReportService: process_instance_dict = process_instance["ProcessInstanceModel"].serialized for metadata_column in metadata_columns: if metadata_column["accessor"] not in process_instance_dict: - process_instance_dict[ - metadata_column["accessor"] - ] = process_instance[metadata_column["accessor"]] + process_instance_dict[metadata_column["accessor"]] = ( + process_instance[metadata_column["accessor"]] + ) results.append(process_instance_dict) return results @@ -414,13 +420,16 @@ class ProcessInstanceReportService: ) if report_filter.with_tasks_assigned_to_my_group is True: - group_model_join_conditions = [GroupModel.id == HumanTaskModel.lane_assignment_id] + group_model_join_conditions = [ + GroupModel.id == HumanTaskModel.lane_assignment_id + ] if report_filter.user_group_identifier: - group_model_join_conditions.append(GroupModel.identifier == report_filter.user_group_identifier) + group_model_join_conditions.append( + GroupModel.identifier == report_filter.user_group_identifier + ) process_instance_query = process_instance_query.join(HumanTaskModel) process_instance_query = process_instance_query.join( - GroupModel, - and_(*group_model_join_conditions) + GroupModel, and_(*group_model_join_conditions) ) process_instance_query = process_instance_query.join( UserGroupAssignmentModel, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index e933eda91..c6e3db42e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -17,6 +17,7 @@ from spiffworkflow_backend.models.task import MultiInstanceType from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.git_service import GitCommandError from spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, @@ -36,7 +37,10 @@ class ProcessInstanceService: user: UserModel, ) -> ProcessInstanceModel: """Get_process_instance_from_spec.""" - current_git_revision = GitService.get_current_revision() + try: + current_git_revision = GitService.get_current_revision() + except GitCommandError: + current_git_revision = "" process_instance_model = ProcessInstanceModel( status=ProcessInstanceStatus.not_started.value, process_initiator=user, @@ -81,7 +85,8 @@ class ProcessInstanceService: db.session.add(process_instance) db.session.commit() error_message = ( - f"Error running waiting task for process_instance {process_instance.id}" + "Error 
running waiting task for process_instance" + f" {process_instance.id}" + f"({process_instance.process_model_identifier}). {str(e)}" ) current_app.logger.error(error_message) @@ -121,7 +126,7 @@ class ProcessInstanceService: if next_task_trying_again is not None: process_instance_api.next_task = ( ProcessInstanceService.spiff_task_to_api_task( - next_task_trying_again, add_docs_and_forms=True + processor, next_task_trying_again, add_docs_and_forms=True ) ) @@ -174,7 +179,10 @@ class ProcessInstanceService: else: raise ApiError.from_task( error_code="task_lane_user_error", - message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." + message=( + "Spiff Task %s lane user dict must have a key called" + " 'value' with the user's uid in it." + ) % spiff_task.task_spec.name, task=spiff_task, ) @@ -277,7 +285,9 @@ class ProcessInstanceService: @staticmethod def spiff_task_to_api_task( - spiff_task: SpiffTask, add_docs_and_forms: bool = False + processor: ProcessInstanceProcessor, + spiff_task: SpiffTask, + add_docs_and_forms: bool = False, ) -> Task: """Spiff_task_to_api_task.""" task_type = spiff_task.task_spec.spec_type @@ -311,6 +321,8 @@ class ProcessInstanceService: if spiff_task.parent: parent_id = spiff_task.parent.id + serialized_task_spec = processor.serialize_task_spec(spiff_task.task_spec) + task = Task( spiff_task.id, spiff_task.task_spec.name, @@ -324,6 +336,7 @@ class ProcessInstanceService: process_identifier=spiff_task.task_spec._wf_spec.name, properties=props, parent=parent_id, + event_definition=serialized_task_spec.get("event_definition"), call_activity_process_identifier=call_activity_process_identifier, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py index 67be986e1..8fa25bc08 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py @@ -13,6 +13,8 @@ from flask_bpmn.api.api_error import ApiError from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) +from spiffworkflow_backend.interfaces import ProcessGroupLite +from spiffworkflow_backend.interfaces import ProcessGroupLitesWithCache from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_group import ProcessGroupSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -146,7 +148,10 @@ class ProcessModelService(FileSystemService): if len(instances) > 0: raise ApiError( error_code="existing_instances", - message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.", + message=( + f"We cannot delete the model `{process_model_id}`, there are" + " existing instances that depend on it." 
+ ), ) process_model = self.get_process_model(process_model_id) path = self.workflow_path(process_model) @@ -224,31 +229,46 @@ class ProcessModelService(FileSystemService): new_process_model_list = [] for process_model in process_models: uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}" - result = AuthorizationService.user_has_permission( + has_permission = AuthorizationService.user_has_permission( user=user, permission="create", target_uri=uri ) - if result: + if has_permission: new_process_model_list.append(process_model) return new_process_model_list return process_models @classmethod - def get_parent_group_array(cls, process_identifier: str) -> list[dict]: + def get_parent_group_array_and_cache_it( + cls, process_identifier: str, process_group_cache: dict[str, ProcessGroup] + ) -> ProcessGroupLitesWithCache: """Get_parent_group_array.""" full_group_id_path = None - parent_group_array = [] + parent_group_array: list[ProcessGroupLite] = [] for process_group_id_segment in process_identifier.split("/")[0:-1]: if full_group_id_path is None: full_group_id_path = process_group_id_segment else: full_group_id_path = os.path.join(full_group_id_path, process_group_id_segment) # type: ignore - parent_group = ProcessModelService.get_process_group(full_group_id_path) + parent_group = process_group_cache.get(full_group_id_path, None) + if parent_group is None: + parent_group = ProcessModelService.get_process_group(full_group_id_path) + if parent_group: + if full_group_id_path not in process_group_cache: + process_group_cache[full_group_id_path] = parent_group parent_group_array.append( {"id": parent_group.id, "display_name": parent_group.display_name} ) - return parent_group_array + return {"cache": process_group_cache, "process_groups": parent_group_array} + + @classmethod + def get_parent_group_array(cls, process_identifier: str) -> list[ProcessGroupLite]: + """Get_parent_group_array.""" + parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it( + process_identifier, {} + ) + return parent_group_lites_with_cache["process_groups"] @classmethod def get_process_groups( @@ -339,8 +359,11 @@ class ProcessModelService(FileSystemService): if len(problem_models) > 0: raise ApiError( error_code="existing_instances", - message=f"We cannot delete the group `{process_group_id}`, " - f"there are models with existing instances inside the group. {problem_models}", + message=( + f"We cannot delete the group `{process_group_id}`, there are" + " models with existing instances inside the group." 
+ f" {problem_models}" + ), ) shutil.rmtree(path) self.cleanup_process_group_display_order() @@ -392,7 +415,10 @@ class ProcessModelService(FileSystemService): if process_group is None: raise ApiError( error_code="process_group_could_not_be_loaded_from_disk", - message=f"We could not load the process_group from disk from: {dir_path}", + message=( + "We could not load the process_group from disk from:" + f" {dir_path}" + ), ) else: process_group_id = dir_path.replace(FileSystemService.root_path(), "") @@ -457,7 +483,10 @@ class ProcessModelService(FileSystemService): if process_model_info is None: raise ApiError( error_code="process_model_could_not_be_loaded_from_disk", - message=f"We could not load the process_model from disk with data: {data}", + message=( + "We could not load the process_model from disk with data:" + f" {data}" + ), ) else: if name is None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py index 9112e20f0..ed331672c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py @@ -112,7 +112,10 @@ class ScriptUnitTestRunner: except json.decoder.JSONDecodeError as ex: return ScriptUnitTestResult( result=False, - error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}", + error=( + "Failed to parse expectedOutputJson:" + f" {unit_test['expectedOutputJson']}: {str(ex)}" + ), ) script = task.task_spec.script diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py index e4dee4913..aa9e6d147 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py @@ -44,8 +44,10 @@ class SecretService: except Exception as e: raise ApiError( error_code="create_secret_error", - message=f"There was an error creating a secret with key: {key} and value ending with: {value[:-4]}. " - f"Original error is {e}", + message=( + f"There was an error creating a secret with key: {key} and value" + f" ending with: {value[:-4]}. Original error is {e}" + ), ) from e return secret_model @@ -89,7 +91,9 @@ class SecretService: else: raise ApiError( error_code="update_secret_error", - message=f"Cannot update secret with key: {key}. Resource does not exist.", + message=( + f"Cannot update secret with key: {key}. Resource does not exist." + ), status_code=404, ) @@ -104,11 +108,16 @@ class SecretService: except Exception as e: raise ApiError( error_code="delete_secret_error", - message=f"Could not delete secret with key: {key}. Original error is: {e}", + message=( + f"Could not delete secret with key: {key}. Original error" + f" is: {e}" + ), ) from e else: raise ApiError( error_code="delete_secret_error", - message=f"Cannot delete secret with key: {key}. Resource does not exist.", + message=( + f"Cannot delete secret with key: {key}. Resource does not exist." 
+ ), status_code=404, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py index 72f59d1f7..4fdfbd6d1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py @@ -192,7 +192,8 @@ class SpecFileService(FileSystemService): full_file_path = SpecFileService.full_file_path(process_model_info, file_name) if not os.path.exists(full_file_path): raise ProcessModelFileNotFoundError( - f"No file found with name {file_name} in {process_model_info.display_name}" + f"No file found with name {file_name} in" + f" {process_model_info.display_name}" ) with open(full_file_path, "rb") as f_handle: spec_file_data = f_handle.read() @@ -314,8 +315,9 @@ class SpecFileService(FileSystemService): ).first() if message_model is None: raise ValidationException( - f"Could not find message model with identifier '{message_model_identifier}'" - f"Required by a Start Event in : {ref.file_name}" + "Could not find message model with identifier" + f" '{message_model_identifier}'Required by a Start Event in :" + f" {ref.file_name}" ) message_triggerable_process_model = ( MessageTriggerableProcessModel.query.filter_by( @@ -335,7 +337,8 @@ class SpecFileService(FileSystemService): != ref.process_model_id ): raise ValidationException( - f"Message model is already used to start process model {ref.process_model_id}" + "Message model is already used to start process model" + f" {ref.process_model_id}" ) @staticmethod @@ -353,8 +356,9 @@ class SpecFileService(FileSystemService): ).first() if message_model is None: raise ValidationException( - f"Could not find message model with identifier '{message_model_identifier}'" - f"specified by correlation property: {cpre}" + "Could not find message model with identifier" + f" '{message_model_identifier}'specified by correlation" + f" property: {cpre}" ) # fixme: I think we are currently ignoring the correction properties. 
message_correlation_property = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py index 0e6cf1cbe..20412e549 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py @@ -13,6 +13,9 @@ from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.models.user_group_assignment_waiting import ( + UserGroupAssignmentWaitingModel, +) class UserService: @@ -21,11 +24,11 @@ class UserService: @classmethod def create_user( cls, + username: str, service: str, service_id: str, - name: Optional[str] = "", - username: Optional[str] = "", email: Optional[str] = "", + display_name: Optional[str] = "", ) -> UserModel: """Create_user.""" user_model: Optional[UserModel] = ( @@ -41,8 +44,8 @@ class UserService: username=username, service=service, service_id=service_id, - name=name, email=email, + display_name=display_name, ) db.session.add(user_model) @@ -55,6 +58,7 @@ class UserService: message=f"Could not add user {username}", ) from e cls.create_principal(user_model.id) + UserService().apply_waiting_group_assignments(user_model) return user_model else: @@ -69,45 +73,12 @@ class UserService: ) ) - @classmethod - def find_or_create_user( - cls, - service: str, - service_id: str, - name: Optional[str] = None, - username: Optional[str] = None, - email: Optional[str] = None, - ) -> UserModel: - """Find_or_create_user.""" - user_model: UserModel - try: - user_model = cls.create_user( - service=service, - service_id=service_id, - name=name, - username=username, - email=email, - ) - except ApiError: - user_model = ( - UserModel.query.filter(UserModel.service == service) - .filter(UserModel.service_id == service_id) - .first() - ) - return user_model - # Returns true if the current user is logged in. @staticmethod def has_user() -> bool: """Has_user.""" return "token" in g and bool(g.token) and "user" in g and bool(g.user) - # Returns true if the given user uid is different from the current user's uid. - @staticmethod - def is_different_user(uid: str) -> bool: - """Is_different_user.""" - return UserService.has_user() and uid is not None and uid is not g.user.uid - @staticmethod def current_user() -> Any: """Current_user.""" @@ -117,20 +88,6 @@ class UserService: ) return g.user - @staticmethod - def in_list(uids: list[str]) -> bool: - """Returns true if the current user's id is in the given list of ids. - - False if there is no user, or the user is not in the list. - """ - if ( - UserService.has_user() - ): # If someone is logged in, lock tasks that don't belong to them. 
- user = UserService.current_user() - if user.uid in uids: - return True - return False - @staticmethod def get_principal_by_user_id(user_id: int) -> PrincipalModel: """Get_principal_by_user_id.""" @@ -173,8 +130,57 @@ class UserService: @classmethod def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None: """Add_user_to_group.""" - ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) - db.session.add(ugam) + exists = ( + UserGroupAssignmentModel() + .query.filter_by(user_id=user.id) + .filter_by(group_id=group.id) + .count() + ) + if not exists: + ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) + db.session.add(ugam) + db.session.commit() + + @classmethod + def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None: + """Add_waiting_group_assignment.""" + wugam = ( + UserGroupAssignmentWaitingModel() + .query.filter_by(username=username) + .filter_by(group_id=group.id) + .first() + ) + if not wugam: + wugam = UserGroupAssignmentWaitingModel( + username=username, group_id=group.id + ) + db.session.add(wugam) + db.session.commit() + if wugam.is_match_all(): + for user in UserModel.query.all(): + cls.add_user_to_group(user, group) + + @classmethod + def apply_waiting_group_assignments(cls, user: UserModel) -> None: + """Apply_waiting_group_assignments.""" + waiting = ( + UserGroupAssignmentWaitingModel() + .query.filter(UserGroupAssignmentWaitingModel.username == user.username) + .all() + ) + for assignment in waiting: + cls.add_user_to_group(user, assignment.group) + db.session.delete(assignment) + wildcard = ( + UserGroupAssignmentWaitingModel() + .query.filter( + UserGroupAssignmentWaitingModel.username + == UserGroupAssignmentWaitingModel.MATCH_ALL_USERS + ) + .all() + ) + for assignment in wildcard: + cls.add_user_to_group(user, assignment.group) db.session.commit() @staticmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn b/spiffworkflow-backend/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn new file mode 100644 index 000000000..2e33d429b --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn @@ -0,0 +1,45 @@ + + + + + Flow_0gixxkm + + + + + + + + + + Flow_0gixxkm + Flow_1oi9nsn + + + Flow_1oi9nsn + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json new file mode 100644 index 000000000..ae61e4963 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json @@ -0,0 +1,6 @@ +{ + "title": "{FORM_IDENTIFIER}", + "description": "", + "properties": {}, + "required": [] +} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json new file mode 100644 index 000000000..654ce121f --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json @@ -0,0 +1,3 @@ +{ + "ui:order": [] +} diff --git a/spiffworkflow-backend/tests/data/data_object_test/data_object.bpmn b/spiffworkflow-backend/tests/data/data_object_test/data_object.bpmn new file mode 100644 index 000000000..c112339e2 --- /dev/null +++ 
b/spiffworkflow-backend/tests/data/data_object_test/data_object.bpmn @@ -0,0 +1,75 @@ + + + + + Flow_0hnphp9 + + + + Flow_0hnphp9 + Flow_0amajxh + + DataObjectReference_10g8dit + + the_data_object_var = 'hey' + + + + Flow_1ifqo6o + + + + Flow_0amajxh + Flow_1ifqo6o + + + DataObjectReference_10g8dit + Property_0a8w16m + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/error/script_error_with_task_data.bpmn b/spiffworkflow-backend/tests/data/error/script_error_with_task_data.bpmn new file mode 100644 index 000000000..cd5f58aa4 --- /dev/null +++ b/spiffworkflow-backend/tests/data/error/script_error_with_task_data.bpmn @@ -0,0 +1,86 @@ + + + + + Flow_10jwwqy + + + + Flow_1axnzv6 + + + + + + { + "current_user": { + "id": "2", + "username": "ciadmin1" + }, + "num": 0 +} + { + "Mike": "Awesome", + "i": 2, + "current_user": { + "id": "2", + "username": "ciadmin1" + }, + "num": 0, + "my_var": "whatwhat", + "person": "Kevin" +} + + + {} + {} + + + {"current_user": {"id": "1", "username": "kb"}} + {"Mike": "Awesome", "current_user": {"id": "1", "username": "kb"}, "heyhey": "https://demo.spiffworkflow.org", "i": 2, "members": [], "my_var": "whatwhat", "person": "Kevin"} + + + + Flow_10jwwqy + Flow_1utkzvj + my_var = 'THE VAR' + + + + + Flow_1utkzvj + Flow_1axnzv6 + hey + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn new file mode 100644 index 000000000..9f2f26bf4 --- /dev/null +++ b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn @@ -0,0 +1,137 @@ + + + + + + + + Flow_1l15rbh + + + + Flow_1l15rbh + Flow_0d35i06 + Flow_0tzaigt + Flow_1vld4r2 + + + + Flow_0d35i06 + Flow_1w3n49n + + + + Flow_0tzaigt + Flow_1q47ol8 + + + + + + + Flow_1q47ol8 + + + + + Flow_1w3n49n + + + + Flow_1vld4r2 + Flow_13ai5vv + + timedelta(hours=1) + + + + + Click the button. 
+ + Flow_13ai5vv + Flow_1vwnf3n + + + Flow_1vwnf3n + + + + + result + + + + + result + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/script_refresh_permissions/refresh_permisions.bpmn b/spiffworkflow-backend/tests/data/script_refresh_permissions/refresh_permisions.bpmn new file mode 100644 index 000000000..630cd1221 --- /dev/null +++ b/spiffworkflow-backend/tests/data/script_refresh_permissions/refresh_permisions.bpmn @@ -0,0 +1,39 @@ + + + + + Flow_01cweoc + + + + Flow_1xle2yo + + + + Flow_01cweoc + Flow_1xle2yo + refresh_permissions([]) + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 52f1889e9..47cf2d876 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -41,7 +41,7 @@ class BaseTest: if isinstance(user, UserModel): return user - user = UserService.create_user("internal", username, username=username) + user = UserService.create_user(username, "internal", username) if isinstance(user, UserModel): return user @@ -133,7 +133,6 @@ class BaseTest: ) -> TestResponse: """Create_process_model.""" if process_model_id is not None: - # make sure we have a group process_group_id, _ = os.path.split(process_model_id) modified_process_group_id = process_group_id.replace("/", ":") @@ -141,7 +140,6 @@ class BaseTest: os.path.join(FileSystemService.root_path(), process_group_id) ) if ProcessModelService.is_group(process_group_path): - if exception_notification_addresses is None: exception_notification_addresses = [] @@ -171,7 +169,8 @@ class BaseTest: raise Exception("You must create the group first") else: raise Exception( - "You must include the process_model_id, which must be a path to the model" + "You must include the process_model_id, which must be a path to the" + " model" ) def get_test_data_file_contents( @@ -324,13 +323,9 @@ class BaseTest: permission_names: Optional[list[str]] = None, ) -> UserModel: """Add_permissions_to_user.""" - permission_target = PermissionTargetModel.query.filter_by( - uri=target_uri - ).first() - if permission_target is None: - permission_target = PermissionTargetModel(uri=target_uri) - db.session.add(permission_target) - db.session.commit() + permission_target = AuthorizationService.find_or_create_permission_target( + target_uri + ) if permission_names is None: permission_names = [member.name for member in Permission] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py index 20a0bb67b..ce1655cb9 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py @@ -1,4 +1,7 @@ """Test_authentication.""" +import base64 + +import jwt from flask import Flask from flask.testing import FlaskClient from tests.spiffworkflow_backend.helpers.base_test import BaseTest @@ -44,13 +47,16 @@ class TestFlaskOpenId(BaseTest): client: FlaskClient, with_db_and_bpmn_file_cleanup: None, ) -> None: + """Test_get_token.""" + code = "testadmin1:1234123412341234" + """It should be possible to get a token.""" - code = ( - 
"c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx" - ) + backend_basic_auth_string = code + backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") + backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { "Content-Type": "application/x-www-form-urlencoded", - "Authorization": f"Basic {code}", + "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}", } data = { "grant_type": "authorization_code", @@ -59,3 +65,13 @@ class TestFlaskOpenId(BaseTest): } response = client.post("/openid/token", data=data, headers=headers) assert response + assert response.is_json + assert "access_token" in response.json + assert "id_token" in response.json + assert "refresh_token" in response.json + + decoded_token = jwt.decode( + response.json["id_token"], options={"verify_signature": False} + ) + assert "iss" in decoded_token + assert "email" in decoded_token diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index adc21c29f..ef34fe060 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -4,6 +4,7 @@ import json import os import time from typing import Any +from typing import Dict import pytest from flask.app import Flask @@ -162,6 +163,83 @@ class TestProcessApi(BaseTest): assert process_model.primary_file_name == bpmn_file_name assert process_model.primary_process_id == "sample" + def test_process_model_create_with_natural_language( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_model_create_with_natural_language.""" + process_group_id = "test_process_group" + process_group_description = "Test Process Group" + process_model_id = "sample" + process_model_identifier = f"{process_group_id}/{process_model_id}" + self.create_process_group( + client, with_super_admin_user, process_group_id, process_group_description + ) + + text = "Create a Bug Tracker process model " + text += ( + "with a Bug Details form that collects summary, description, and priority" + ) + body = {"natural_language_text": text} + self.create_process_model_with_api( + client, + process_model_id=process_model_identifier, + user=with_super_admin_user, + ) + response = client.post( + f"/v1.0/process-models-natural-language/{process_group_id}", + content_type="application/json", + data=json.dumps(body), + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 201 + assert response.json is not None + assert response.json["id"] == f"{process_group_id}/bug-tracker" + assert response.json["display_name"] == "Bug Tracker" + assert response.json["metadata_extraction_paths"] == [ + {"key": "summary", "path": "summary"}, + {"key": "description", "path": "description"}, + {"key": "priority", "path": "priority"}, + ] + + process_model = ProcessModelService.get_process_model(response.json["id"]) + process_model_path = os.path.join( + FileSystemService.root_path(), + FileSystemService.id_string_to_relative_path(process_model.id), + ) + + process_model_diagram = os.path.join(process_model_path, "bug-tracker.bpmn") + assert os.path.exists(process_model_diagram) + form_schema_json = os.path.join(process_model_path, "bug-details-schema.json") + assert os.path.exists(form_schema_json) + 
form_uischema_json = os.path.join( + process_model_path, "bug-details-uischema.json" + ) + assert os.path.exists(form_uischema_json) + + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier="bug-tracker" + ).first() + assert process_instance_report is not None + report_column_accessors = [ + i["accessor"] for i in process_instance_report.report_metadata["columns"] + ] + expected_column_accessors = [ + "id", + "process_model_display_name", + "start_in_seconds", + "end_in_seconds", + "username", + "status", + "summary", + "description", + "priority", + ] + assert report_column_accessors == expected_column_accessors + def test_primary_process_id_updates_via_xml( self, app: Flask, @@ -249,10 +327,6 @@ class TestProcessApi(BaseTest): assert response.json is not None assert response.json["ok"] is True - # assert we no longer have a model - with pytest.raises(ProcessEntityNotFoundError): - ProcessModelService.get_process_model(process_model_identifier) - def test_process_model_delete_with_instances( self, app: Flask, @@ -304,7 +378,8 @@ class TestProcessApi(BaseTest): assert data["error_code"] == "existing_instances" assert ( data["message"] - == f"We cannot delete the model `{process_model_identifier}`, there are existing instances that depend on it." + == f"We cannot delete the model `{process_model_identifier}`, there are" " existing instances that depend on it." ) def test_process_model_update( @@ -2019,7 +2094,6 @@ class TestProcessApi(BaseTest): mail = app.config["MAIL_APP"] with mail.record_messages() as outbox: - response = client.post( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) @@ -2041,6 +2115,36 @@ assert process is not None assert process.status == "error" + def test_task_data_is_set_even_if_process_instance_errors( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_task_data_is_set_even_if_process_instance_errors.""" + process_model = load_test_spec( + process_model_id="group/error_with_task_data", + bpmn_file_name="script_error_with_task_data.bpmn", + process_model_source_directory="error", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + response = client.post( + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 400 + assert process_instance.status == "error" + processor = ProcessInstanceProcessor(process_instance) + spiff_task = processor.get_task_by_bpmn_identifier( + "script_task_one", processor.bpmn_process_instance + ) + assert spiff_task is not None + assert spiff_task.data != {} + def test_process_model_file_create( self, app: Flask, @@ -2507,6 +2611,148 @@ class TestProcessApi(BaseTest): print("test_script_unit_test_run") + def test_send_event( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_send_event.""" + process_group_id = "test_group" + process_model_id = "process_navigation" + bpmn_file_name = "process_navigation.bpmn" + bpmn_file_location = "process_navigation" + process_model_identifier =
self.create_group_and_model_with_bpmn( + client=client, + user=with_super_admin_user, + process_group_id=process_group_id, + process_model_id=process_model_id, + bpmn_file_name=bpmn_file_name, + bpmn_file_location=bpmn_file_location, + ) + + bpmn_file_data_bytes = self.get_test_data_file_contents( + bpmn_file_name, bpmn_file_location + ) + self.create_spec_file( + client=client, + process_model_id=process_model_identifier, + process_model_location=process_model_identifier, + file_name=bpmn_file_name, + file_data=bpmn_file_data_bytes, + user=with_super_admin_user, + ) + + headers = self.logged_in_headers(with_super_admin_user) + response = self.create_process_instance_from_process_model_id_with_api( + client, process_model_identifier, headers + ) + process_instance_id = response.json["id"] + + client.post( + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + + # This is exactly the same as the test above, but for some reason mypy infers a totally irrelevant type for `data` here unless it is annotated. + data: Dict = { + "correlation_properties": [], + "expression": None, + "external": True, + "internal": False, + "payload": {"message": "message 1"}, + "name": "Message 1", + "typename": "MessageEventDefinition", + } + response = client.post( + f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + content_type="application/json", + data=json.dumps(data), + ) + assert response.json["status"] == "complete" + + response = client.get( + f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?all_tasks=true", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 200 + end = next(task for task in response.json if task["name"] == "End") + assert end["data"]["result"] == {"message": "message 1"} + + def test_manual_complete_task( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_manual_complete_task.""" + process_group_id = "test_group" + process_model_id = "process_navigation" + bpmn_file_name = "process_navigation.bpmn" + bpmn_file_location = "process_navigation" + process_model_identifier = self.create_group_and_model_with_bpmn( + client=client, + user=with_super_admin_user, + process_group_id=process_group_id, + process_model_id=process_model_id, + bpmn_file_name=bpmn_file_name, + bpmn_file_location=bpmn_file_location, + ) + + bpmn_file_data_bytes = self.get_test_data_file_contents( + bpmn_file_name, bpmn_file_location + ) + self.create_spec_file( + client=client, + process_model_id=process_model_identifier, + process_model_location=process_model_identifier, + file_name=bpmn_file_name, + file_data=bpmn_file_data_bytes, + user=with_super_admin_user, + ) + + headers = self.logged_in_headers(with_super_admin_user) + response = self.create_process_instance_from_process_model_id_with_api( + client, process_model_identifier, headers + ) + process_instance_id = response.json["id"] + + client.post( + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + + data = { + "dateTime": "timedelta(hours=1)", + "external": True, + "internal": True, + "label": "Event_0e4owa3",
"typename": "TimerEventDefinition", + } + response = client.post( + f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + content_type="application/json", + data=json.dumps(data), + ) + + response = client.get( + f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert len(response.json) == 1 + task = response.json[0] + + response = client.post( + f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}", + headers=self.logged_in_headers(with_super_admin_user), + content_type="application/json", + ) + assert response.json["status"] == "suspended" + def setup_initial_groups_for_move_tests( self, client: FlaskClient, with_super_admin_user: UserModel ) -> None: @@ -2750,7 +2996,9 @@ class TestProcessApi(BaseTest): ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id="save_process_instance_metadata/save_process_instance_metadata", + process_model_id=( + "save_process_instance_metadata/save_process_instance_metadata" + ), bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) @@ -2807,7 +3055,9 @@ class TestProcessApi(BaseTest): ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id="save_process_instance_metadata/save_process_instance_metadata", + process_model_id=( + "save_process_instance_metadata/save_process_instance_metadata" + ), bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) @@ -2928,3 +3178,31 @@ class TestProcessApi(BaseTest): assert len(response.json["results"]) == 2 assert response.json["results"][1]["id"] == process_instance_one.id assert response.json["results"][0]["id"] == process_instance_two.id + + def test_process_data_show( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_data_show.""" + process_model = load_test_spec( + "test_group/data_object_test", + process_model_source_directory="data_object_test", + ) + process_instance_one = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance_one) + processor.do_engine_steps(save=True) + assert process_instance_one.status == "user_input_required" + + response = client.get( + f"/v1.0/process-data/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance_one.id}/the_data_object_var", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.status_code == 200 + assert response.json is not None + assert response.json["process_data_value"] == "hey" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_users_controller.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_users_controller.py new file mode 100644 index 000000000..c1c62705f --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_users_controller.py @@ -0,0 +1,47 @@ +"""Test_users_controller.""" +from flask.app import Flask +from flask.testing import FlaskClient +from 
tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.models.user import UserModel + + +class TestUsersController(BaseTest): + """TestUsersController.""" + + def test_user_search_returns_a_user( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_user_search_returns_a_user.""" + self.find_or_create_user(username="aa") + self.find_or_create_user(username="ab") + self.find_or_create_user(username="abc") + self.find_or_create_user(username="ac") + + self._assert_search_has_count(client, with_super_admin_user, "aa", 1) + self._assert_search_has_count(client, with_super_admin_user, "ab", 2) + self._assert_search_has_count(client, with_super_admin_user, "ac", 1) + self._assert_search_has_count(client, with_super_admin_user, "ad", 0) + self._assert_search_has_count(client, with_super_admin_user, "a", 4) + + def _assert_search_has_count( + self, + client: FlaskClient, + with_super_admin_user: UserModel, + username_prefix: str, + expected_count: int, + ) -> None: + """_assert_search_has_count.""" + response = client.get( + f"/v1.0/users/search?username_prefix={username_prefix}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 200 + assert response.json + assert response.json["users"] is not None + assert response.json["username_prefix"] == username_prefix + assert len(response.json["users"]) == expected_count diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py new file mode 100644 index 000000000..cbf625168 --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py @@ -0,0 +1,60 @@ +"""Test_get_all_permissions.""" +from flask.app import Flask +from flask.testing import FlaskClient +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.scripts.get_all_permissions import GetAllPermissions +from spiffworkflow_backend.services.authorization_service import AuthorizationService + + +class TestGetAllPermissions(BaseTest): + """TestGetAllPermissions.""" + + def test_can_get_all_permissions( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_get_all_permissions.""" + self.find_or_create_user("test_user") + + + # build a minimal script context; the script does not need a real process instance to run
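+ # task=None works because get_all_permissions evidently never reads from the task; the identifiers below are placeholders that just satisfy the ScriptAttributesContext interface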
+ script_attributes_context = ScriptAttributesContext( + task=None, + environment_identifier="testing", + process_instance_id=1, + process_model_identifier="my_test_user", + ) + AuthorizationService.add_permission_from_uri_or_macro( + permission="start", target="PG:hey:group", group_identifier="my_test_group" + ) + AuthorizationService.add_permission_from_uri_or_macro( + permission="all", target="/tasks", group_identifier="my_test_group" + ) + + expected_permissions = [ + { + "group_identifier": "my_test_group", + "uri": "/process-instances/hey:group:*", + "permissions": ["create"], + }, + { + "group_identifier": "my_test_group", + "uri": "/process-instances/for-me/hey:group:*", + "permissions": ["read"], + }, + { + "group_identifier": "my_test_group", + "uri": "/tasks", + "permissions": ["create", "read", "update", "delete"], + }, + ] + + permissions = GetAllPermissions().run(script_attributes_context) + assert permissions == expected_permissions diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py index 8f3864fe8..90e4158da 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py @@ -68,7 +68,7 @@ class TestGetLocaltime(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - human_task = process_instance.human_tasks[0] + human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) @@ -81,7 +81,7 @@ class TestGetLocaltime(BaseTest): human_task, ) - human_task = process_instance.human_tasks[0] + human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py new file mode 100644 index 000000000..67cf55c85 --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py @@ -0,0 +1,50 @@ +"""Test_refresh_permissions.""" +import pytest +from flask.app import Flask +from flask.testing import FlaskClient +from flask_bpmn.api.api_error import ApiError +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestRefreshPermissions(BaseTest): + """TestRefreshPermissions.""" + + def test_refresh_permissions_requires_elevated_permission( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_refresh_permissions_requires_elevated_permission.""" + basic_user = self.find_or_create_user("basic_user") + privileged_user = self.find_or_create_user("privileged_user") + self.add_permissions_to_user( + privileged_user, + target_uri="/can-run-privileged-script/refresh_permissions", + permission_names=["create"], + ) + process_model = load_test_spec( + process_model_id="refresh_permissions", + process_model_source_directory="script_refresh_permissions", + ) + process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=basic_user + ) + + processor = ProcessInstanceProcessor(process_instance) + + with pytest.raises(ApiError) as exception: + processor.do_engine_steps(save=True) + assert "ScriptUnauthorizedForUserError" in str(exception) + + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=privileged_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert process_instance.status == "complete" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py index 96eb62970..738896cd7 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py @@ -24,17 +24,18 @@ class TestSaveProcessInstanceMetadata(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_save_process_instance_metadata.""" - initiator_user = self.find_or_create_user("initiator_user") self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) process_model = load_test_spec( - process_model_id="save_process_instance_metadata/save_process_instance_metadata", + process_model_id=( + "save_process_instance_metadata/save_process_instance_metadata" + ), bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user + process_model=process_model, user=with_super_admin_user ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py index 05375754d..83ed7fd8e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -4,9 +4,12 @@ from flask import Flask from flask.testing import FlaskClient from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.authorization_service import InvalidPermissionError +from spiffworkflow_backend.services.group_service import GroupService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) @@ -14,6 +17,7 @@ from spiffworkflow_backend.services.process_instance_service import ( ProcessInstanceService, ) from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.user_service import UserService class TestAuthorizationService(BaseTest): @@ -121,7 +125,7 @@ class TestAuthorizationService(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - human_task = process_instance.human_tasks[0] + human_task = process_instance.active_human_tasks[0] spiff_task = 
processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) @@ -129,13 +133,286 @@ class TestAuthorizationService(BaseTest): processor, spiff_task, {}, initiator_user, human_task ) - human_task = process_instance.human_tasks[0] + human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) finance_user = AuthorizationService.create_user_from_sign_in( - {"username": "testuser2", "sub": "open_id"} + { + "username": "testuser2", + "sub": "testuser2", + "iss": "https://test.stuff", + "email": "testuser2", + } ) ProcessInstanceService.complete_form_task( processor, spiff_task, {}, finance_user, human_task ) + + def test_explode_permissions_all_on_process_group( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_all_on_process_group.""" + expected_permissions = [ + ("/logs/some-process-group:some-process-model:*", "read"), + ("/process-data/some-process-group:some-process-model:*", "read"), + ("/process-groups/some-process-group:some-process-model:*", "create"), + ("/process-groups/some-process-group:some-process-model:*", "delete"), + ("/process-groups/some-process-group:some-process-model:*", "read"), + ("/process-groups/some-process-group:some-process-model:*", "update"), + ( + "/process-instance-suspend/some-process-group:some-process-model:*", + "create", + ), + ( + "/process-instance-terminate/some-process-group:some-process-model:*", + "create", + ), + ("/process-instances/some-process-group:some-process-model:*", "create"), + ("/process-instances/some-process-group:some-process-model:*", "delete"), + ("/process-instances/some-process-group:some-process-model:*", "read"), + ("/process-models/some-process-group:some-process-model:*", "create"), + ("/process-models/some-process-group:some-process-model:*", "delete"), + ("/process-models/some-process-group:some-process-model:*", "read"), + ("/process-models/some-process-group:some-process-model:*", "update"), + ("/task-data/some-process-group:some-process-model:*", "read"), + ("/task-data/some-process-group:some-process-model:*", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "all", "PG:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_start_on_process_group( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_start_on_process_group.""" + expected_permissions = [ + ( + "/process-instances/for-me/some-process-group:some-process-model:*", + "read", + ), + ("/process-instances/some-process-group:some-process-model:*", "create"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "start", "PG:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_all_on_process_model( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_all_on_process_model.""" + expected_permissions = [ + ("/logs/some-process-group:some-process-model/*", "read"), 
+ ("/process-data/some-process-group:some-process-model/*", "read"), + ( + "/process-instance-suspend/some-process-group:some-process-model/*", + "create", + ), + ( + "/process-instance-terminate/some-process-group:some-process-model/*", + "create", + ), + ("/process-instances/some-process-group:some-process-model/*", "create"), + ("/process-instances/some-process-group:some-process-model/*", "delete"), + ("/process-instances/some-process-group:some-process-model/*", "read"), + ("/process-models/some-process-group:some-process-model/*", "create"), + ("/process-models/some-process-group:some-process-model/*", "delete"), + ("/process-models/some-process-group:some-process-model/*", "read"), + ("/process-models/some-process-group:some-process-model/*", "update"), + ("/task-data/some-process-group:some-process-model/*", "read"), + ("/task-data/some-process-group:some-process-model/*", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "all", "PM:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_start_on_process_model( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_start_on_process_model.""" + expected_permissions = [ + ( + "/process-instances/for-me/some-process-group:some-process-model/*", + "read", + ), + ("/process-instances/some-process-group:some-process-model/*", "create"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "start", "PM:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_basic( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_basic.""" + expected_permissions = [ + ("/process-instances/for-me", "read"), + ("/process-instances/reports/*", "create"), + ("/process-instances/reports/*", "delete"), + ("/process-instances/reports/*", "read"), + ("/process-instances/reports/*", "update"), + ("/processes", "read"), + ("/service-tasks", "read"), + ("/tasks/*", "create"), + ("/tasks/*", "delete"), + ("/tasks/*", "read"), + ("/tasks/*", "update"), + ("/user-groups/for-current-user", "read"), + ] + permissions_to_assign = AuthorizationService.explode_permissions("all", "BASIC") + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_all( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_all.""" + expected_permissions = [ + ("/*", "create"), + ("/*", "delete"), + ("/*", "read"), + ("/*", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions("all", "ALL") + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_with_target_uri( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_with_target_uri.""" + expected_permissions = 
[ + ("/hey/model", "create"), + ("/hey/model", "delete"), + ("/hey/model", "read"), + ("/hey/model", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "all", "/hey/model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_granting_access_to_group_gives_access_to_group_and_subgroups( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_granting_access_to_group_gives_access_to_group_and_subgroups.""" + user = self.find_or_create_user(username="user_one") + user_group = GroupService.find_or_create_group("group_one") + UserService.add_user_to_group(user, user_group) + AuthorizationService.add_permission_from_uri_or_macro( + user_group.identifier, "read", "PG:hey" + ) + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") + + def test_explode_permissions_with_invalid_target_uri( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_with_invalid_target_uri.""" + with pytest.raises(InvalidPermissionError): + AuthorizationService.explode_permissions("all", "BAD_MACRO") + + def test_explode_permissions_with_start_to_incorrect_target( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_with_start_to_incorrect_target.""" + with pytest.raises(InvalidPermissionError): + AuthorizationService.explode_permissions("start", "/hey/model") + + def test_can_refresh_permissions( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_refresh_permissions.""" + user = self.find_or_create_user(username="user_one") + admin_user = self.find_or_create_user(username="testadmin1") + + # this group is not mentioned so it will get deleted + GroupService.find_or_create_group("group_two") + assert GroupModel.query.filter_by(identifier="group_two").first() is not None + + group_info = [ + { + "users": ["user_one"], + "name": "group_one", + "permissions": [{"actions": ["create", "read"], "uri": "PG:hey"}], + } + ] + AuthorizationService.refresh_permissions(group_info) + assert GroupModel.query.filter_by(identifier="group_two").first() is None + assert GroupModel.query.filter_by(identifier="group_one").first() is not None + self.assert_user_has_permission(admin_user, "create", "/anything-they-want") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") + self.assert_user_has_permission(user, "create", "/v1.0/process-groups/hey:yo") + + group_info = [ + { + "users": ["user_one"], + "name": "group_one", + "permissions": [{"actions": ["read"], "uri": "PG:hey"}], + } + ] + AuthorizationService.refresh_permissions(group_info) + assert GroupModel.query.filter_by(identifier="group_one").first() is not None + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") + self.assert_user_has_permission( + user, "create", "/v1.0/process-groups/hey:yo", expected_result=False + ) + self.assert_user_has_permission(admin_user, "create", "/anything-they-want") diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py index b66f32370..a96989697 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py @@ -16,6 +16,7 @@ from spiffworkflow_backend.services.user_service import UserService # we think we can get the list of roles for a user. # spiff needs a way to determine what each role allows. + # user role allows list and read of all process groups/models # super-admin role allows create, update, and delete of all process groups/models # * super-admins users maybe conventionally get the user role as well diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 1a96ca882..b4a650dc6 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -31,10 +31,14 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, ) -> None: """Test_script_engine_takes_data_and_returns_expected_results.""" + app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey" + app.config["THREAD_LOCAL_DATA"].process_instance_id = 0 script_engine = ProcessInstanceProcessor._script_engine result = script_engine._evaluate("a", {"a": 1}) assert result == 1 + app.config["THREAD_LOCAL_DATA"].process_model_identifier = None + app.config["THREAD_LOCAL_DATA"].process_instance_id = None def test_script_engine_can_use_custom_scripts( self, @@ -42,12 +46,17 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, ) -> None: """Test_script_engine_takes_data_and_returns_expected_results.""" + app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey" + app.config["THREAD_LOCAL_DATA"].process_instance_id = 0 script_engine = ProcessInstanceProcessor._script_engine result = script_engine._evaluate("fact_service(type='norris')", {}) assert ( result - == "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants." + == "Chuck Norris doesn’t read books. He stares them down until he gets the" + " information he wants." 
) + app.config["THREAD_LOCAL_DATA"].process_model_identifier = None + app.config["THREAD_LOCAL_DATA"].process_instance_id = None def test_sets_permission_correctly_on_human_task( self, @@ -80,8 +89,8 @@ class TestProcessInstanceProcessor(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] assert human_task.lane_assignment_id is None assert len(human_task.potential_owners) == 1 assert human_task.potential_owners[0] == initiator_user @@ -97,8 +106,8 @@ class TestProcessInstanceProcessor(BaseTest): processor, spiff_task, {}, initiator_user, human_task ) - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] assert human_task.lane_assignment_id == finance_group.id assert len(human_task.potential_owners) == 1 assert human_task.potential_owners[0] == finance_user @@ -114,8 +123,8 @@ class TestProcessInstanceProcessor(BaseTest): ProcessInstanceService.complete_form_task( processor, spiff_task, {}, finance_user, human_task ) - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] assert human_task.lane_assignment_id is None assert len(human_task.potential_owners) == 1 assert human_task.potential_owners[0] == initiator_user @@ -163,8 +172,8 @@ class TestProcessInstanceProcessor(BaseTest): processor.do_engine_steps(save=True) processor.save() - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] assert human_task.lane_assignment_id is None assert len(human_task.potential_owners) == 1 assert human_task.potential_owners[0] == initiator_user @@ -181,8 +190,8 @@ class TestProcessInstanceProcessor(BaseTest): ) assert human_task.completed_by_user_id == initiator_user.id - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] assert human_task.lane_assignment_id is None assert len(human_task.potential_owners) == 2 assert human_task.potential_owners == [finance_user_three, finance_user_four] @@ -200,8 +209,8 @@ class TestProcessInstanceProcessor(BaseTest): processor, spiff_task, {}, finance_user_three, human_task ) assert human_task.completed_by_user_id == finance_user_three.id - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] assert human_task.lane_assignment_id is None assert len(human_task.potential_owners) == 1 assert human_task.potential_owners[0] == finance_user_four @@ -218,8 +227,8 @@ class TestProcessInstanceProcessor(BaseTest): processor, spiff_task, {}, finance_user_four, human_task ) assert human_task.completed_by_user_id == finance_user_four.id - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = 
process_instance.active_human_tasks[0] assert human_task.lane_assignment_id is None assert len(human_task.potential_owners) == 1 assert human_task.potential_owners[0] == initiator_user @@ -231,8 +240,8 @@ class TestProcessInstanceProcessor(BaseTest): processor, spiff_task, {}, initiator_user, human_task ) - assert len(process_instance.human_tasks) == 1 - human_task = process_instance.human_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) @@ -276,11 +285,11 @@ class TestProcessInstanceProcessor(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - assert len(process_instance.human_tasks) == 1 - initial_human_task_id = process_instance.human_tasks[0].id + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id # save again to ensure we attempt to process the human tasks again processor.save() - assert len(process_instance.human_tasks) == 1 - assert initial_human_task_id == process_instance.human_tasks[0].id + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py index 75ad3f28e..b40412ff8 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py @@ -880,7 +880,9 @@ class TestProcessInstanceReportService(BaseTest): process_instance_report = ProcessInstanceReportService.report_with_identifier( user=user_one, - report_identifier="system_report_completed_instances_with_tasks_completed_by_me", + report_identifier=( + "system_report_completed_instances_with_tasks_completed_by_me" + ), ) report_filter = ( ProcessInstanceReportService.filter_from_metadata_with_overrides( @@ -983,7 +985,9 @@ class TestProcessInstanceReportService(BaseTest): process_instance_report = ProcessInstanceReportService.report_with_identifier( user=user_one, - report_identifier="system_report_completed_instances_with_tasks_completed_by_my_groups", + report_identifier=( + "system_report_completed_instances_with_tasks_completed_by_my_groups" + ), ) report_filter = ( ProcessInstanceReportService.filter_from_metadata_with_overrides( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py new file mode 100644 index 000000000..959975d5b --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py @@ -0,0 +1,54 @@ +"""Test_user_service.""" +from flask.app import Flask +from flask.testing import FlaskClient +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.models.user_group_assignment_waiting import ( + UserGroupAssignmentWaitingModel, +) +from spiffworkflow_backend.services.group_service import GroupService +from spiffworkflow_backend.services.user_service import UserService + + +class TestUserService(BaseTest): + """TestUserService.""" + + def test_assigning_a_group_to_a_user_before_the_user_is_created( + self, + app:
Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_assigning_a_group_to_a_user_before_the_user_is_created.""" + a_test_group = GroupService.find_or_create_group("aTestGroup") + UserService.add_waiting_group_assignment("initiator_user", a_test_group) + initiator_user = self.find_or_create_user("initiator_user") + assert initiator_user.groups[0] == a_test_group + + def test_assigning_a_group_to_all_users_updates_new_users( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_assigning_a_group_to_all_users_updates_new_users.""" + everybody_group = GroupService.find_or_create_group("everybodyGroup") + UserService.add_waiting_group_assignment( + UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group + ) + initiator_user = self.find_or_create_user("initiator_user") + assert initiator_user.groups[0] == everybody_group + + def test_assigning_a_group_to_all_users_updates_existing_users( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_assigning_a_group_to_all_users_updates_existing_users.""" + initiator_user = self.find_or_create_user("initiator_user") + everybody_group = GroupService.find_or_create_group("everybodyGroup") + UserService.add_waiting_group_assignment( + UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group + ) + assert initiator_user.groups[0] == everybody_group diff --git a/spiffworkflow-frontend/bin/collect_cypress_stats b/spiffworkflow-frontend/bin/collect_cypress_stats new file mode 100755 index 000000000..150efc800 --- /dev/null +++ b/spiffworkflow-frontend/bin/collect_cypress_stats @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?'
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +# see also: npx cypress run --env grep="can filter",grepFilterSpecs=true +# https://github.com/cypress-io/cypress/tree/develop/npm/grep#pre-filter-specs-grepfilterspecs + +iterations="${1:-10}" + +test_case_matches="$(rg '^ it\(')" + +stats_file="/var/tmp/cypress_stats.txt" + +function run_all_test_cases() { + local stat_index="$1" + + pushd "$NO_TERM_LIMITS_PROJECTS_DIR/github/sartography/sample-process-models" + gitc + popd + + while read -r test_case_line; do + test_case_file="$(awk -F: '{print $1}' <<< "$test_case_line")" + test_case_name_side="$(awk -F: '{print $2}' <<< "$test_case_line")" + test_case_name=$(hot_sed -E "s/^\s+it\('(.+)'.*/\1/" <<< "$test_case_name_side") + echo "running test case: $test_case_file::$test_case_name" + if ./node_modules/.bin/cypress run --e2e --browser chrome --spec "$test_case_file" --env grep="$test_case_name"; then + echo "$stat_index:::$test_case_file:::$test_case_name: PASS" >> "$stats_file" + else + echo "$stat_index:::$test_case_file:::$test_case_name: FAIL" >> "$stats_file" + fi + done <<< "$test_case_matches" +} + +# clear the stats file +echo > "$stats_file" + +for ((global_stat_index=1;global_stat_index<=$iterations;global_stat_index++)); do +# for global_stat_index in {1..$iterations}; do + run_all_test_cases "$global_stat_index" +done + +# prints summary of most-failing test cases +grep FAIL "$stats_file" | awk -F ':::' '{for (i=2; i
diff --git a/spiffworkflow-frontend/cypress.config.js b/spiffworkflow-frontend/cypress.config.js
+const deleteVideosOnSuccess = (on) => { + const filesToDelete = [] + on('after:spec', (_spec, results) => { + if (results.stats.failures === 0 && results.video) { + filesToDelete.push(results.video) + } + }) + on('after:run', async () => { + if (filesToDelete.length) { + console.log( + 'after:run hook: Deleting %d video(s) from successful specs', + filesToDelete.length + ) + await Promise.all(filesToDelete.map((videoFile) => rm(videoFile))) + } + }) +} module.exports = defineConfig({ projectId: 'crax1q', + + // since it's slow + videoCompression: useVideoCompression, + + videoUploadOnPasses: false, chromeWebSecurity: false, e2e: { baseUrl: 'http://localhost:7001', - setupNodeEvents(_on, config) { + setupNodeEvents(on, config) { + deleteVideosOnSuccess(on) require('@cypress/grep/src/plugin')(config); return config; }, diff --git a/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js b/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js index bef0e5603..e10c48575 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js @@ -30,7 +30,10 @@ describe('process-groups', () => { .find('.cds--btn--danger') .click(); cy.url().should('include', `process-groups`); - cy.contains(groupId).should('not.exist'); + cy.contains(newGroupDisplayName).should('not.exist'); + + // meaning the process group list page is loaded, so we can sign out safely without worrying about ajax requests failing + cy.get('.tile-process-group-content-container').should('exist'); }); // process groups no longer has pagination post-tiles diff --git a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js index 4d33d13f1..e582dcbba 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js @@ -68,8 +68,7 @@ describe('process-instances', () => { cy.login(); cy.navigateToProcessModel( 'Acceptance Tests Group One', - 'Acceptance Tests Model 1', - 'acceptance-tests-model-1' + 'Acceptance Tests Model 1' ); });
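The failure summary at the end of collect_cypress_stats is truncated above, mid-awk. As a reading aid, here is a rough TypeScript equivalent of that summarization step: it reads the `index:::spec:::test name: PASS|FAIL` lines the script writes and counts failures per test case. The stats file path and line format come from the script itself; everything else is an assumption, not the script's actual code.

```typescript
// Hypothetical stand-in for the truncated awk summary in collect_cypress_stats.
// Counts FAIL lines per "spec:::test name" key and prints least-failing first,
// mirroring a `sort | uniq -c | sort -n` style summary.
import { readFileSync } from 'fs';

const statsFile = '/var/tmp/cypress_stats.txt';
const failCounts = new Map<string, number>();

for (const line of readFileSync(statsFile, 'utf8').split('\n')) {
  if (!line.endsWith(': FAIL')) continue;
  // drop the trailing result and the leading iteration index;
  // key on spec file + test case name
  const key = line.replace(/: FAIL$/, '').split(':::').slice(1).join(':::');
  failCounts.set(key, (failCounts.get(key) ?? 0) + 1);
}

Array.from(failCounts.entries())
  .sort((a, b) => a[1] - b[1])
  .forEach(([testCase, count]) => console.log(`${count} ${testCase}`));
```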
afterEach(() => { @@ -80,6 +79,7 @@ describe('process-instances', () => { const originalDmnOutputForKevin = 'Very wonderful'; const newDmnOutputForKevin = 'The new wonderful'; const dmnOutputForDan = 'pretty wonderful'; + const acceptanceTestOneDisplayName = 'Acceptance Tests Model 1'; const originalPythonScript = 'person = "Kevin"'; const newPythonScript = 'person = "Dan"'; @@ -95,13 +95,13 @@ describe('process-instances', () => { cy.getBySel(`edit-file-${dmnFile.replace('.', '-')}`).click(); updateDmnText(originalDmnOutputForKevin, newDmnOutputForKevin); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); cy.getBySel('files-accordion').click(); cy.getBySel(`edit-file-${dmnFile.replace('.', '-')}`).click(); updateDmnText(newDmnOutputForKevin, originalDmnOutputForKevin); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); // Change bpmn @@ -109,13 +109,13 @@ describe('process-instances', () => { cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); cy.contains(`Process Model File: ${bpmnFile}`); updateBpmnPythonScript(newPythonScript); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); cy.getBySel('files-accordion').click(); cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); updateBpmnPythonScript(originalPythonScript); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); }); @@ -160,6 +160,7 @@ describe('process-instances', () => { cy.getBySel('process-instance-list-link').click(); cy.getBySel('process-instance-show-link').first().click(); cy.getBySel('process-instance-log-list-link').click(); + cy.getBySel('process-instance-log-detailed').click(); cy.contains('process_model_one'); cy.contains('State change to COMPLETED'); cy.basicPaginationTest(); @@ -167,6 +168,8 @@ describe('process-instances', () => { it('can filter', () => { cy.getBySel('process-instance-list-link').click(); + cy.getBySel('process-instance-list-all').click(); + cy.contains('All Process Instances'); cy.assertAtLeastOneItemInPaginatedResults(); const statusSelect = '#process-instance-status-select'; @@ -174,6 +177,7 @@ describe('process-instances', () => { if (!['all', 'waiting'].includes(processStatus)) { cy.get(statusSelect).click(); cy.get(statusSelect).contains(processStatus).click(); + cy.get(statusSelect).click(); cy.getBySel('filter-button').click(); // FIXME: wait a little bit for the useEffects to be able to fully set processInstanceFilters cy.wait(1000); diff --git a/spiffworkflow-frontend/cypress/e2e/process_models.cy.js b/spiffworkflow-frontend/cypress/e2e/process_models.cy.js index b2a64e6ee..6b51ee918 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_models.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_models.cy.js @@ -42,7 +42,7 @@ describe('process-models', () => { cy.contains(modelDisplayName).should('not.exist'); }); - it('can create new bpmn, dmn, and json files', () => { + it('can create new bpmn and dmn and json files', () => { const uuid = () => Cypress._.random(0, 1e6); const id = uuid(); const directParentGroupId = 'acceptance-tests-group-one'; @@ -127,6 +127,9 @@ describe('process-models', () => { cy.deleteProcessModelAndConfirm(deleteProcessModelButtonId, groupId); cy.contains(modelId).should('not.exist'); 
cy.contains(modelDisplayName).should('not.exist'); + + // we go back to the parent process group after deleting the model + cy.get('.tile-process-group-content-container').should('exist'); }); it('can upload and run a bpmn file', () => { diff --git a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js index e58566b8c..922c42091 100644 --- a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js @@ -13,11 +13,10 @@ const checkTaskHasClass = (taskName, className) => { cy.get(`g[data-element-id=${taskName}]`).should('have.class', className); }; -const kickOffModelWithForm = (modelId, formName) => { +const kickOffModelWithForm = () => { cy.navigateToProcessModel( 'Acceptance Tests Group One', - 'Acceptance Tests Model 2', - 'acceptance-tests-model-2' + 'Acceptance Tests Model 2' ); cy.runPrimaryBpmnFile(true); }; @@ -32,12 +31,11 @@ describe('tasks', () => { it('can complete and navigate a form', () => { const groupDisplayName = 'Acceptance Tests Group One'; - const modelId = `acceptance-tests-model-2`; const modelDisplayName = `Acceptance Tests Model 2`; const completedTaskClassName = 'completed-task-highlight'; const activeTaskClassName = 'active-task-highlight'; - cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId); + cy.navigateToProcessModel(groupDisplayName, modelDisplayName); cy.runPrimaryBpmnFile(true); submitInputIntoFormField( @@ -71,7 +69,7 @@ describe('tasks', () => { ); cy.contains('Task: get_user_generated_number_four'); - cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId); + cy.navigateToProcessModel(groupDisplayName, modelDisplayName); cy.getBySel('process-instance-list-link').click(); cy.assertAtLeastOneItemInPaginatedResults(); @@ -94,7 +92,7 @@ describe('tasks', () => { cy.contains('Tasks').should('exist'); // FIXME: this will probably need a better way to link to the proper form that we want - cy.contains('Complete Task').click(); + cy.contains('Go').click(); submitInputIntoFormField( 'get_user_generated_number_four', @@ -103,7 +101,7 @@ describe('tasks', () => { ); cy.url().should('include', '/tasks'); - cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId); + cy.navigateToProcessModel(groupDisplayName, modelDisplayName); cy.getBySel('process-instance-list-link').click(); cy.assertAtLeastOneItemInPaginatedResults(); @@ -122,6 +120,6 @@ describe('tasks', () => { kickOffModelWithForm(); cy.navigateToHome(); - cy.basicPaginationTest(); + cy.basicPaginationTest('process-instance-show-link'); }); }); diff --git a/spiffworkflow-frontend/cypress/support/commands.js b/spiffworkflow-frontend/cypress/support/commands.js index 7a910cae0..8369a22c3 100644 --- a/spiffworkflow-frontend/cypress/support/commands.js +++ b/spiffworkflow-frontend/cypress/support/commands.js @@ -95,14 +95,16 @@ Cypress.Commands.add( } else { cy.contains(/Process Instance.*[kK]icked [oO]ff/); cy.reload(true); + cy.contains('Process Model:').should('exist'); cy.contains(/Process Instance.*[kK]icked [oO]ff/).should('not.exist'); + cy.contains('[data-qa=process-model-show-permissions-loaded]', 'true'); } } ); Cypress.Commands.add( 'navigateToProcessModel', - (groupDisplayName, modelDisplayName, modelIdentifier) => { + (groupDisplayName, modelDisplayName) => { cy.navigateToAdmin(); cy.contains(miscDisplayName).click(); cy.contains(`Process Group: ${miscDisplayName}`, { timeout: 10000 }); @@ -114,17 +116,33 @@ Cypress.Commands.add( } ); 
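The `cy.contains('[data-qa=process-model-show-permissions-loaded]', 'true')` line added to runPrimaryBpmnFile above leans on a useful Cypress behavior: when cy.contains gets both a selector and text, it retries until an element matching the selector exists and contains that text, so it works as a readiness gate before navigating away. A generic helper built on the same idea might look like this; waitForDataQaFlag is hypothetical and not part of this changeset.

```typescript
// Hypothetical readiness-gate helper for a Cypress support file. It relies on
// the same retry behavior used above: cy.contains(selector, text) keeps
// re-querying until the element exists AND contains the expected text.
const waitForDataQaFlag = (dataQaTag: string, expectedText = 'true') => {
  cy.contains(`[data-qa=${dataQaTag}]`, expectedText);
};

// usage, e.g. before signing out at the end of a spec:
// waitForDataQaFlag('process-model-show-permissions-loaded');
```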
-Cypress.Commands.add('basicPaginationTest', () => { - cy.getBySel('pagination-options').scrollIntoView(); - cy.get('.cds--select__item-count').find('.cds--select-input').select('2'); +Cypress.Commands.add( + 'basicPaginationTest', + (dataQaTagToUseToEnsureTableHasLoaded = 'paginated-entity-id') => { + cy.getBySel('pagination-options').scrollIntoView(); + cy.get('.cds--select__item-count').find('.cds--select-input').select('2'); - // NOTE: this is a em dash instead of en dash - cy.contains(/\b1–2 of \d+/); - cy.get('.cds--pagination__button--forward').click(); - cy.contains(/\b3–4 of \d+/); - cy.get('.cds--pagination__button--backward').click(); - cy.contains(/\b1–2 of \d+/); -}); + // NOTE: this is an en dash, not a plain hyphen + cy.contains(/\b1–2 of \d+/); + + // Ensure everything is loaded before we leave this function and try to sign out. + // Just showing "results 1–2 of n" is not good enough, since the ajax request may + // not have finished yet. To be sure it has finished, grab the id from page 1 and + // remember it, then use the contains command, which waits for the element to + // exist AND for that element to contain the text we are looking for. + cy.getBySel(dataQaTagToUseToEnsureTableHasLoaded) + .first() + .then(($element) => { + const oldId = $element.text().trim(); + cy.get('.cds--pagination__button--forward').click(); + cy.contains(/\b3–4 of \d+/); + cy.get('.cds--pagination__button--backward').click(); + cy.contains(/\b1–2 of \d+/); + cy.contains(`[data-qa=${dataQaTagToUseToEnsureTableHasLoaded}]`, oldId); + }); + } +); Cypress.Commands.add('assertAtLeastOneItemInPaginatedResults', () => { cy.contains(/\b[1-9]\d*–[1-9]\d* of [1-9]\d*/); diff --git a/spiffworkflow-frontend/public/index.html b/spiffworkflow-frontend/public/index.html index ae3a23076..1a7cafa95 100644 --- a/spiffworkflow-frontend/public/index.html +++ b/spiffworkflow-frontend/public/index.html @@ -7,7 +7,7 @@ - <title>spiffworkflow-frontend</title> + <title>SpiffWorkflow</title> diff --git a/spiffworkflow-frontend/src/App.tsx b/spiffworkflow-frontend/src/App.tsx index 6357a713f..ecf9fc54c 100644 --- a/spiffworkflow-frontend/src/App.tsx +++ b/spiffworkflow-frontend/src/App.tsx @@ -14,15 +14,14 @@ import { ErrorForDisplay } from './interfaces'; import { AbilityContext } from './contexts/Can'; import UserService from './services/UserService'; +import ErrorDisplay from './components/ErrorDisplay'; export default function App() { - const [errorMessage, setErrorMessage] = useState<ErrorForDisplay | null>( - null - ); + const [errorObject, setErrorObject] = useState<ErrorForDisplay | null>(null); const errorContextValueArray = useMemo( - () => [errorMessage, setErrorMessage], - [errorMessage] + () => [errorObject, setErrorObject], + [errorObject] ); if (!UserService.isLoggedIn()) { @@ -32,29 +31,6 @@ export default function App() { const ability = defineAbility(() => {}); - let errorTag = null; - if (errorMessage) { - let sentryLinkTag = null; - if (errorMessage.sentry_link) { - sentryLinkTag = ( - <span> - { - ': Find details about this error here (it may take a moment to become available): ' - } - <a href={errorMessage.sentry_link} target="_blank" rel="noreferrer"> - {errorMessage.sentry_link} - </a> - </span> - ); - } - errorTag = ( - - ); - } - return (
{/* @ts-ignore */} @@ -63,7 +39,7 @@ - {errorTag} + <ErrorDisplay /> diff --git a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx new file mode 100644 index 000000000..cdbed75a0 --- /dev/null +++ b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx @@ -0,0 +1,55 @@ +import { useContext } from 'react'; +import ErrorContext from '../contexts/ErrorContext'; +import { Notification } from './Notification'; + +export default function ErrorDisplay() { + const [errorObject, setErrorObject] = (useContext as any)(ErrorContext); + + let errorTag = null; + if (errorObject) { + let sentryLinkTag = null; + if (errorObject.sentry_link) { + sentryLinkTag = ( + <span> + { + ': Find details about this error here (it may take a moment to become available): ' + } + <a href={errorObject.sentry_link} target="_blank" rel="noreferrer"> + {errorObject.sentry_link} + </a> + </span> + ); + } + + let message =
<div>{errorObject.message}</div>;
+    let title = 'Error:';
+    if ('task_name' in errorObject && errorObject.task_name) {
+      title = 'Error in python script:';
+      message = (
+        <>
+          <br />
+          <div>
+            Task: {errorObject.task_name} ({errorObject.task_id})
+          </div>
+          <div>File name: {errorObject.file_name}</div>
+          <div>Line number in script task: {errorObject.line_number}</div>
+          <br />
+          <div>{errorObject.message}</div>
+        </>
+      );
+    }
+
+    errorTag = (
+      <Notification
+        title={title}
+        onClose={() => setErrorObject(null)}
+        type="error"
+      >
+        {message}
+        {sentryLinkTag}
+      </Notification>
+    );
+  }
+
+  return errorTag;
+} diff --git a/spiffworkflow-frontend/src/components/NavigationBar.tsx b/spiffworkflow-frontend/src/components/NavigationBar.tsx index 7a0ffd3ea..e482ae526 100644 --- a/spiffworkflow-frontend/src/components/NavigationBar.tsx +++ b/spiffworkflow-frontend/src/components/NavigationBar.tsx @@ -81,7 +81,7 @@ export default function NavigationBar() { return ( <> - {UserService.getUsername()} + {UserService.getPreferredUsername()} diff --git a/spiffworkflow-frontend/src/components/Notification.tsx b/spiffworkflow-frontend/src/components/Notification.tsx ; if (type === 'error') { - iconClassName = 'red-icon'; + iconComponent = ; } return (
- + {iconComponent}
{title}
{children}
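The App.tsx, ErrorDisplay.tsx, and Notification.tsx hunks above split error handling across a context and a display component. A condensed sketch of that pattern follows; only the context tuple shape and the useMemo wrapper are taken from the diff, and the markup is illustrative, not the literal components.

```typescript
// Condensed sketch of the error-context pattern introduced above. Error state
// lives in a [value, setter] tuple owned by App; ErrorDisplay consumes and
// clears it. Markup here is illustrative only.
import { createContext, useContext, useMemo, useState } from 'react';

interface ErrorForDisplay {
  message: string;
  sentry_link?: string;
}

type ErrorContextValue = [
  ErrorForDisplay | null,
  (e: ErrorForDisplay | null) => void
];
const ErrorContext = createContext<ErrorContextValue>([null, () => {}]);

function ErrorDisplay() {
  const [errorObject, setErrorObject] = useContext(ErrorContext);
  if (!errorObject) {
    return null; // nothing to show
  }
  return (
    // the real component renders a Notification; a div stands in here
    <div role="alert" onClick={() => setErrorObject(null)}>
      Error: {errorObject.message}
    </div>
  );
}

export default function App() {
  const [errorObject, setErrorObject] = useState<ErrorForDisplay | null>(null);
  // memoize the tuple so consumers re-render only when the error changes
  const value: ErrorContextValue = useMemo(
    () => [errorObject, setErrorObject],
    [errorObject]
  );
  return (
    <ErrorContext.Provider value={value}>
      <ErrorDisplay />
      {/* routes go here */}
    </ErrorContext.Provider>
  );
}
```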
diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListDeleteReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListDeleteReport.tsx new file mode 100644 index 000000000..ca04d516c --- /dev/null +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListDeleteReport.tsx @@ -0,0 +1,29 @@ +import { ProcessInstanceReport } from '../interfaces'; +import HttpService from '../services/HttpService'; +import ButtonWithConfirmation from './ButtonWithConfirmation'; + +type OwnProps = { + onSuccess: (..._args: any[]) => any; + processInstanceReportSelection: ProcessInstanceReport; +}; + +export default function ProcessInstanceListDeleteReport({ + onSuccess, + processInstanceReportSelection, +}: OwnProps) { + const deleteProcessInstanceReport = () => { + HttpService.makeCallToBackend({ + path: `/process-instances/reports/${processInstanceReportSelection.id}`, + successCallback: onSuccess, + httpMethod: 'DELETE', + }); + }; + + return ( + + ); +} diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index d961627ab..e89c355ba 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -1,4 +1,4 @@ -import { useContext, useEffect, useMemo, useState } from 'react'; +import { useContext, useEffect, useMemo, useRef, useState } from 'react'; import { Link, useNavigate, @@ -40,6 +40,7 @@ import { getProcessModelFullIdentifierFromSearchParams, modifyProcessIdentifierForPathParam, refreshAtInterval, + setErrorMessageSafely, } from '../helpers'; import PaginationForTable from './PaginationForTable'; @@ -59,9 +60,11 @@ import { ReportColumnForEditing, ReportMetadata, ReportFilter, + User, } from '../interfaces'; import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; +import ProcessInstanceListDeleteReport from './ProcessInstanceListDeleteReport'; import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport'; import { FormatProcessModelDisplayName } from './MiniComponents'; import { Notification } from './Notification'; @@ -130,9 +133,13 @@ export default function ProcessInstanceListTable({ const [endFromTimeInvalid, setEndFromTimeInvalid] = useState(false); const [endToTimeInvalid, setEndToTimeInvalid] = useState(false); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const [errorObject, setErrorObject] = (useContext as any)(ErrorContext); - const processInstancePathPrefix = + const processInstanceListPathPrefix = + variant === 'all' + ? '/admin/process-instances/all' + : '/admin/process-instances/for-me'; + const processInstanceShowPathPrefix = variant === 'all' ? 
'/admin/process-instances' : '/admin/process-instances/for-me'; @@ -162,6 +169,12 @@ export default function ProcessInstanceListTable({ useState(null); const [reportColumnFormMode, setReportColumnFormMode] = useState(''); + const [processInstanceInitiatorOptions, setProcessInstanceInitiatorOptions] = + useState([]); + const [processInitiatorSelection, setProcessInitiatorSelection] = + useState(null); + const lastRequestedInitatorSearchTerm = useRef(); + const dateParametersToAlwaysFilterBy: dateParameters = useMemo(() => { return { start_from: [setStartFromDate, setStartFromTime], @@ -307,7 +320,7 @@ export default function ProcessInstanceListTable({ if (filtersEnabled) { // populate process model selection HttpService.makeCallToBackend({ - path: `/process-models?per_page=1000&recursive=true`, + path: `/process-models?per_page=1000&recursive=true&include_parent_groups=true`, successCallback: processResultForProcessModels, }); } else { @@ -428,8 +441,11 @@ export default function ProcessInstanceListTable({ } }; - // TODO: after factoring this out page hangs when invalid date ranges and applying the filter - const calculateStartAndEndSeconds = () => { + // jasquat/burnettk - 2022-12-28 do not check the validity of the dates when rendering components to avoid the page being + // re-rendered while the user is still typing. NOTE that we also prevented rerendering + // with the use of the setErrorMessageSafely function. we are not sure why the context not + // changing still causes things to rerender when we call its setter without our extra check. + const calculateStartAndEndSeconds = (validate: boolean = true) => { const startFromSeconds = convertDateAndTimeStringsToSeconds( startFromDate, startFromTime || '00:00:00' @@ -447,29 +463,25 @@ export default function ProcessInstanceListTable({ endToTime || '00:00:00' ); let valid = true; - if (isTrueComparison(startFromSeconds, '>', startToSeconds)) { - setErrorMessage({ - message: '"Start date from" cannot be after "start date to"', - }); - valid = false; - } - if (isTrueComparison(endFromSeconds, '>', endToSeconds)) { - setErrorMessage({ - message: '"End date from" cannot be after "end date to"', - }); - valid = false; - } - if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) { - setErrorMessage({ - message: '"Start date from" cannot be after "end date from"', - }); - valid = false; - } - if (isTrueComparison(startToSeconds, '>', endToSeconds)) { - setErrorMessage({ - message: '"Start date to" cannot be after "end date to"', - }); - valid = false; + + if (validate) { + let message = ''; + if (isTrueComparison(startFromSeconds, '>', startToSeconds)) { + message = '"Start date from" cannot be after "start date to"'; + } + if (isTrueComparison(endFromSeconds, '>', endToSeconds)) { + message = '"End date from" cannot be after "end date to"'; + } + if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) { + message = '"Start date from" cannot be after "end date from"'; + } + if (isTrueComparison(startToSeconds, '>', endToSeconds)) { + message = '"Start date to" cannot be after "end date to"'; + } + if (message !== '') { + valid = false; + setErrorMessageSafely(message, errorObject, setErrorObject); + } } return { @@ -526,9 +538,9 @@ export default function ProcessInstanceListTable({ queryParamString += `&report_id=${processInstanceReportSelection.id}`; } - setErrorMessage(null); + setErrorObject(null); setProcessInstanceReportJustSaved(null); - navigate(`${processInstancePathPrefix}?${queryParamString}`); + 
navigate(`${processInstanceListPathPrefix}?${queryParamString}`); }; const dateComponent = ( @@ -625,9 +637,9 @@ export default function ProcessInstanceListTable({ queryParamString = `?report_id=${selectedReport.id}`; } - setErrorMessage(null); + setErrorObject(null); setProcessInstanceReportJustSaved(mode || null); - navigate(`${processInstancePathPrefix}${queryParamString}`); + navigate(`${processInstanceListPathPrefix}${queryParamString}`); }; const reportColumns = () => { @@ -657,7 +669,7 @@ export default function ProcessInstanceListTable({ startToSeconds, endFromSeconds, endToSeconds, - } = calculateStartAndEndSeconds(); + } = calculateStartAndEndSeconds(false); if (!valid || !reportMetadata) { return null; @@ -681,6 +693,19 @@ export default function ProcessInstanceListTable({ ); }; + const onDeleteReportSuccess = () => { + processInstanceReportDidChange({ selectedItem: null }); + }; + + const deleteReportComponent = () => { + return processInstanceReportSelection ? ( + + ) : null; + }; + const removeColumn = (reportColumn: ReportColumn) => { if (reportMetadata) { const reportMetadataCopy = { ...reportMetadata }; @@ -760,7 +785,6 @@ export default function ProcessInstanceListTable({ setReportMetadata(reportMetadataCopy); setReportColumnToOperateOn(null); setShowReportColumnForm(false); - setShowReportColumnForm(false); } }; @@ -781,9 +805,12 @@ export default function ProcessInstanceListTable({ }; const updateReportColumn = (event: any) => { - const reportColumnForEditing = reportColumnToReportColumnForEditing( - event.selectedItem - ); + let reportColumnForEditing = null; + if (event.selectedItem) { + reportColumnForEditing = reportColumnToReportColumnForEditing( + event.selectedItem + ); + } setReportColumnToOperateOn(reportColumnForEditing); }; @@ -813,7 +840,29 @@ export default function ProcessInstanceListTable({ if (reportColumnFormMode === '') { return null; } - const formElements = [ + const formElements = []; + if (reportColumnFormMode === 'new') { + formElements.push( + { + if (reportColumn) { + return reportColumn.accessor; + } + return null; + }} + shouldFilterItem={shouldFilterReportColumn} + placeholder="Choose a column to show" + titleText="Column" + selectedItem={reportColumnToOperateOn} + /> + ); + } + formElements.push([ , - ]; + ]); if (reportColumnToOperateOn && reportColumnToOperateOn.filterable) { formElements.push( ); } - if (reportColumnFormMode === 'new') { - formElements.push( - { - if (reportColumn) { - return reportColumn.accessor; - } - return null; - }} - shouldFilterItem={shouldFilterReportColumn} - placeholder="Choose a column to show" - titleText="Column" - /> - ); - } + formElements.push( +
+ ); const modalHeading = reportColumnFormMode === 'new' ? 'Add Column' @@ -956,6 +987,22 @@ export default function ProcessInstanceListTable({ return null; }; + const handleProcessInstanceInitiatorSearchResult = (result: any) => { + if (lastRequestedInitatorSearchTerm.current === result.username_prefix) { + setProcessInstanceInitiatorOptions(result.users); + } + }; + + const searchForProcessInitiator = (inputText: string) => { + if (inputText) { + lastRequestedInitatorSearchTerm.current = inputText; + HttpService.makeCallToBackend({ + path: `/users/search?username_prefix=${inputText}`, + successCallback: handleProcessInstanceInitiatorSearchResult, + }); + } + }; + const filterOptions = () => { if (!showFilterOptions) { return null; @@ -988,7 +1035,27 @@ export default function ProcessInstanceListTable({ selectedItem={processModelSelection} /> - {processStatusSearch()} + + { + setProcessInitiatorSelection(event.selectedItem); + }} + id="process-instance-initiator-search" + data-qa="process-instance-initiator-search" + items={processInstanceInitiatorOptions} + itemToString={(processInstanceInitatorOption: User) => { + if (processInstanceInitatorOption) { + return processInstanceInitatorOption.username; + } + return null; + }} + placeholder="Process Initiator" + titleText="PROC" + selectedItem={processInitiatorSelection} + /> + + {processStatusSearch()} @@ -1062,6 +1129,7 @@ export default function ProcessInstanceListTable({ {saveAsReportComponent()} + {deleteReportComponent()} @@ -1093,10 +1161,10 @@ export default function ProcessInstanceListTable({ return ( - {id} + {id} ); }; diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx index 05b643da8..b956abcc3 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx @@ -78,7 +78,7 @@ export default function ProcessInstanceRun({ checkPermissions = true, }: OwnProps) { const navigate = useNavigate(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const modifiedProcessModelId = modifyProcessIdentifierForPathParam( processModel.id ); @@ -105,12 +105,12 @@ export default function ProcessInstanceRun({ }; const processModelRun = (processInstance: any) => { - setErrorMessage(null); + setErrorObject(null); storeRecentProcessModelInLocalStorage(processModel); HttpService.makeCallToBackend({ path: `/process-instances/${modifiedProcessModelId}/${processInstance.id}/run`, successCallback: onProcessInstanceRun, - failureCallback: setErrorMessage, + failureCallback: setErrorObject, httpMethod: 'POST', }); }; diff --git a/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx b/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx index bf1978237..517a0302b 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx +++ b/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx @@ -11,6 +11,7 @@ import { truncateString, } from '../helpers'; import ProcessInstanceRun from './ProcessInstanceRun'; +import { Notification } from './Notification'; type OwnProps = { headerElement?: ReactElement; @@ -50,20 +51,19 @@ export default function ProcessModelListTiles({ const processInstanceRunResultTag = () => { if (processInstance) { return ( -
-

- Process Instance {processInstance.id} kicked off ( - - view - - ). -

-
+ setProcessInstance(null)} + > + + view + + ); } return null; diff --git a/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx b/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx index 8a3c0b9f7..bd995bc3e 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx +++ b/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx @@ -2,8 +2,7 @@ import { ComboBox, // @ts-ignore } from '@carbon/react'; -import { truncateString } from '../helpers'; -import { ProcessModel } from '../interfaces'; +import { ProcessGroupLite, ProcessModel } from '../interfaces'; type OwnProps = { onChange: (..._args: any[]) => any; @@ -18,12 +17,27 @@ export default function ProcessModelSearch({ onChange, titleText = 'Process model', }: OwnProps) { + const getParentGroupsDisplayName = (processModel: ProcessModel) => { + if (processModel.parent_groups) { + return processModel.parent_groups + .map((parentGroup: ProcessGroupLite) => { + return parentGroup.display_name; + }) + .join(' / '); + } + return ''; + }; + + const getFullProcessModelLabel = (processModel: ProcessModel) => { + return `${processModel.id} (${getParentGroupsDisplayName(processModel)} ${ + processModel.display_name + })`; + }; + const shouldFilterProcessModel = (options: any) => { const processModel: ProcessModel = options.item; const { inputValue } = options; - return `${processModel.id} (${processModel.display_name})`.includes( - inputValue - ); + return getFullProcessModelLabel(processModel).includes(inputValue); }; return ( { if (processModel) { - return `${processModel.id} (${truncateString( - processModel.display_name, - 75 - )})`; + return getFullProcessModelLabel(processModel); } return null; }} diff --git a/spiffworkflow-frontend/src/components/TaskListTable.tsx b/spiffworkflow-frontend/src/components/TaskListTable.tsx index d6688f22e..2e53bcea6 100644 --- a/spiffworkflow-frontend/src/components/TaskListTable.tsx +++ b/spiffworkflow-frontend/src/components/TaskListTable.tsx @@ -2,6 +2,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore import { Button, Table } from '@carbon/react'; import { Link, useSearchParams } from 'react-router-dom'; +import UserService from '../services/UserService'; import PaginationForTable from './PaginationForTable'; import { convertSecondsToFormattedDateTime, @@ -46,6 +47,9 @@ export default function TaskListTable({ const [tasks, setTasks] = useState(null); const [pagination, setPagination] = useState(null); + const preferredUsername = UserService.getPreferredUsername(); + const userEmail = UserService.getUserEmail(); + useEffect(() => { const getTasks = () => { const { page, perPage } = getPageInfoFromSearchParams( @@ -80,56 +84,82 @@ export default function TaskListTable({ autoReload, ]); + const getWaitingForTableCellComponent = ( + processInstanceTask: ProcessInstanceTask + ) => { + let fullUsernameString = ''; + let shortUsernameString = ''; + if (processInstanceTask.assigned_user_group_identifier) { + fullUsernameString = processInstanceTask.assigned_user_group_identifier; + shortUsernameString = processInstanceTask.assigned_user_group_identifier; + } + if (processInstanceTask.potential_owner_usernames) { + fullUsernameString = processInstanceTask.potential_owner_usernames; + const usernames = + processInstanceTask.potential_owner_usernames.split(','); + const firstTwoUsernames = usernames.slice(0, 2); + if (usernames.length > 2) { + firstTwoUsernames.push('...'); + } + shortUsernameString = firstTwoUsernames.join(','); + } + return {shortUsernameString}; + 
}; + const buildTable = () => { if (!tasks) { return null; } - const rows = tasks.map((row) => { - const rowToUse = row as any; - const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`; + const rows = tasks.map((row: ProcessInstanceTask) => { + const taskUrl = `/tasks/${row.process_instance_id}/${row.task_id}`; const modifiedProcessModelIdentifier = - modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); + modifyProcessIdentifierForPathParam(row.process_model_identifier); + + const regex = new RegExp(`\\b(${preferredUsername}|${userEmail})\\b`); + let hasAccessToCompleteTask = false; + if (row.potential_owner_usernames.match(regex)) { + hasAccessToCompleteTask = true; + } return ( - + - {rowToUse.process_instance_id} + {row.process_instance_id} - {rowToUse.process_model_display_name} + {row.process_model_display_name} - {rowToUse.task_title} + {row.task_title} - {showStartedBy ? {rowToUse.username} : ''} - {showWaitingOn ? {rowToUse.group_identifier || '-'} : ''} + {showStartedBy ? {row.process_initiator_username} : ''} + {showWaitingOn ? {getWaitingForTableCellComponent(row)} : ''} - {convertSecondsToFormattedDateTime( - rowToUse.created_at_in_seconds - ) || '-'} + {convertSecondsToFormattedDateTime(row.created_at_in_seconds) || + '-'} diff --git a/spiffworkflow-frontend/src/config.tsx b/spiffworkflow-frontend/src/config.tsx index b0816a39d..abaadd5ef 100644 --- a/spiffworkflow-frontend/src/config.tsx +++ b/spiffworkflow-frontend/src/config.tsx @@ -1,11 +1,23 @@ -const host = window.location.hostname; -let hostAndPort = `api.${host}`; +const { port, hostname } = window.location; +let hostAndPort = `api.${hostname}`; let protocol = 'https'; -if (/^\d+\./.test(host) || host === 'localhost') { - hostAndPort = `${host}:7000`; + +if (/^\d+\./.test(hostname) || hostname === 'localhost') { + let serverPort = 7000; + if (!Number.isNaN(Number(port))) { + serverPort = Number(port) - 1; + } + hostAndPort = `${hostname}:${serverPort}`; protocol = 'http'; } -export const BACKEND_BASE_URL = `${protocol}://${hostAndPort}/v1.0`; + +let url = `${protocol}://${hostAndPort}/v1.0`; +// Allow overriding the backend base url with an environment variable at build time. 
+if (process.env.REACT_APP_BACKEND_BASE_URL) { + url = process.env.REACT_APP_BACKEND_BASE_URL; +} + +export const BACKEND_BASE_URL = url; export const PROCESS_STATUSES = [ 'not_started', diff --git a/spiffworkflow-frontend/src/helpers.test.tsx b/spiffworkflow-frontend/src/helpers.test.tsx index 5a0352b82..660f65f67 100644 --- a/spiffworkflow-frontend/src/helpers.test.tsx +++ b/spiffworkflow-frontend/src/helpers.test.tsx @@ -1,4 +1,8 @@ -import { convertSecondsToFormattedDateString, slugifyString } from './helpers'; +import { + convertSecondsToFormattedDateString, + slugifyString, + underscorizeString, +} from './helpers'; test('it can slugify a string', () => { expect(slugifyString('hello---world_ and then Some such-')).toEqual( @@ -6,6 +10,12 @@ test('it can slugify a string', () => { ); }); +test('it can underscorize a string', () => { + expect(underscorizeString('hello---world_ and then Some such-')).toEqual( + 'hello_world_and_then_some_such' + ); +}); + test('it can keep the correct date when converting seconds to date', () => { const dateString = convertSecondsToFormattedDateString(1666325400); expect(dateString).toEqual('2022-10-21'); diff --git a/spiffworkflow-frontend/src/helpers.tsx b/spiffworkflow-frontend/src/helpers.tsx index 8f6255335..d91f05439 100644 --- a/spiffworkflow-frontend/src/helpers.tsx +++ b/spiffworkflow-frontend/src/helpers.tsx @@ -8,6 +8,7 @@ import { DEFAULT_PER_PAGE, DEFAULT_PAGE, } from './components/PaginationForTable'; +import { ErrorForDisplay } from './interfaces'; // https://www.30secondsofcode.org/js/s/slugify export const slugifyString = (str: any) => { @@ -20,6 +21,10 @@ export const slugifyString = (str: any) => { .replace(/-+$/g, ''); }; +export const underscorizeString = (inputString: string) => { + return slugifyString(inputString).replace(/-/g, '_'); +}; + export const capitalizeFirstLetter = (string: any) => { return string.charAt(0).toUpperCase() + string.slice(1); }; @@ -234,3 +239,17 @@ export const getBpmnProcessIdentifiers = (rootBpmnElement: any) => { childProcesses.push(rootBpmnElement.businessObject.id); return childProcesses; }; + +// Setting the error message state to the same string is still considered a change +// and re-renders the page so check the message first to avoid that. 
+export const setErrorMessageSafely = ( + newErrorMessageString: string, + oldErrorMessage: ErrorForDisplay, + errorMessageSetter: any +) => { + if (oldErrorMessage && oldErrorMessage.message === newErrorMessageString) { + return null; + } + errorMessageSetter({ message: newErrorMessageString }); + return null; +}; diff --git a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx index 574eb4e9e..f8e5f07f8 100644 --- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx +++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx @@ -16,7 +16,10 @@ export const useUriListForPermissions = () => { processInstanceReportListPath: '/v1.0/process-instances/reports', processInstanceResumePath: `/v1.0/process-instance-resume/${params.process_model_id}/${params.process_instance_id}`, processInstanceSuspendPath: `/v1.0/process-instance-suspend/${params.process_model_id}/${params.process_instance_id}`, + processInstanceResetPath: `/v1.0/process-instance-reset/${params.process_model_id}/${params.process_instance_id}`, processInstanceTaskListDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processInstanceSendEventPath: `/v1.0/send-event/${params.process_model_id}/${params.process_instance_id}`, + processInstanceCompleteTaskPath: `/v1.0/complete-task/${params.process_model_id}/${params.process_instance_id}`, processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`, processInstanceTaskListForMePath: `/v1.0/process-instances/for-me/${params.process_model_id}/${params.process_instance_id}/task-info`, processInstanceTerminatePath: `/v1.0/process-instance-terminate/${params.process_model_id}/${params.process_instance_id}`, diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 248a23d7d..08e8341cf 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -355,8 +355,8 @@ svg.notification-icon { word-break: normal; } -.combo-box-in-modal { - height: 300px; +.vertical-spacer-to-allow-combo-box-to-expand-in-modal { + height: 250px; } .cds--btn.narrow-button { diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 7805249be..630db8da1 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -1,3 +1,8 @@ +export interface User { + id: number; + username: string; +} + export interface Secret { id: number; key: string; @@ -5,6 +10,11 @@ export interface Secret { creator_user_id: string; } +export interface ProcessData { + process_data_identifier: string; + process_data_value: any; +} + export interface RecentProcessModel { processGroupIdentifier?: string; processModelIdentifier: string; @@ -12,16 +22,23 @@ export interface RecentProcessModel { } export interface ProcessInstanceTask { - id: string; + id: number; + task_id: string; + process_instance_id: number; process_model_display_name: string; process_model_identifier: string; task_title: string; lane_assignment_id: string; - process_instance_status: number; - updated_at_in_seconds: number; + process_instance_status: string; state: string; process_identifier: string; name: string; + process_initiator_username: string; + assigned_user_group_identifier: string; + created_at_in_seconds: number; + updated_at_in_seconds: number; + current_user_is_potential_owner: number; + potential_owner_usernames: string; } export 
interface ProcessReference { @@ -153,6 +170,10 @@ export type HotCrumbItem = HotCrumbItemArray | HotCrumbItemObject; export interface ErrorForDisplay { message: string; sentry_link?: string; + task_name?: string; + task_id?: string; + line_number?: number; + file_name?: string; } export interface AuthenticationParam { diff --git a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx index d24c2b6e2..2d61439bf 100644 --- a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx @@ -22,14 +22,15 @@ import ProcessInstanceLogList from './ProcessInstanceLogList'; import MessageInstanceList from './MessageInstanceList'; import Configuration from './Configuration'; import JsonSchemaFormBuilder from './JsonSchemaFormBuilder'; +import ProcessModelNewExperimental from './ProcessModelNewExperimental'; export default function AdminRoutes() { const location = useLocation(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; useEffect(() => { - setErrorMessage(null); - }, [location, setErrorMessage]); + setErrorObject(null); + }, [location, setErrorObject]); if (UserService.hasRole(['admin'])) { return ( @@ -50,6 +51,10 @@ export default function AdminRoutes() { path="process-models/:process_group_id/new" element={} /> + } + /> } diff --git a/spiffworkflow-frontend/src/routes/AuthenticationList.tsx b/spiffworkflow-frontend/src/routes/AuthenticationList.tsx index 4f320df4b..a0d151018 100644 --- a/spiffworkflow-frontend/src/routes/AuthenticationList.tsx +++ b/spiffworkflow-frontend/src/routes/AuthenticationList.tsx @@ -7,7 +7,7 @@ import HttpService from '../services/HttpService'; import UserService from '../services/UserService'; export default function AuthenticationList() { - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const [authenticationList, setAuthenticationList] = useState< AuthenticationItem[] | null @@ -26,9 +26,9 @@ export default function AuthenticationList() { HttpService.makeCallToBackend({ path: `/authentications`, successCallback: processResult, - failureCallback: setErrorMessage, + failureCallback: setErrorObject, }); - }, [setErrorMessage]); + }, [setErrorObject]); const buildTable = () => { if (authenticationList) { diff --git a/spiffworkflow-frontend/src/routes/Configuration.tsx b/spiffworkflow-frontend/src/routes/Configuration.tsx index b2e30416d..bd9e59c50 100644 --- a/spiffworkflow-frontend/src/routes/Configuration.tsx +++ b/spiffworkflow-frontend/src/routes/Configuration.tsx @@ -14,7 +14,7 @@ import { usePermissionFetcher } from '../hooks/PermissionService'; export default function Configuration() { const location = useLocation(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const [selectedTabIndex, setSelectedTabIndex] = useState(0); const navigate = useNavigate(); @@ -26,13 +26,13 @@ export default function Configuration() { const { ability } = usePermissionFetcher(permissionRequestData); useEffect(() => { - setErrorMessage(null); + setErrorObject(null); let newSelectedTabIndex = 0; if (location.pathname.match(/^\/admin\/configuration\/authentications\b/)) { newSelectedTabIndex = 1; } setSelectedTabIndex(newSelectedTabIndex); - }, [location, setErrorMessage]); + }, [location, setErrorObject]); return ( <> diff --git 
a/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx b/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx index 872a7a69c..0475d4c75 100644 --- a/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx @@ -11,12 +11,12 @@ import CreateNewInstance from './CreateNewInstance'; export default function HomePageRoutes() { const location = useLocation(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const [selectedTabIndex, setSelectedTabIndex] = useState(0); const navigate = useNavigate(); useEffect(() => { - setErrorMessage(null); + setErrorObject(null); let newSelectedTabIndex = 0; if (location.pathname.match(/^\/tasks\/completed-instances\b/)) { newSelectedTabIndex = 1; @@ -24,7 +24,7 @@ export default function HomePageRoutes() { newSelectedTabIndex = 2; } setSelectedTabIndex(newSelectedTabIndex); - }, [location, setErrorMessage]); + }, [location, setErrorObject]); const renderTabs = () => { if (location.pathname.match(/^\/tasks\/\d+\/\b/)) { diff --git a/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx b/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx index 6d1011014..d4a9c2b44 100644 --- a/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx +++ b/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx @@ -3,7 +3,11 @@ import { useEffect, useState } from 'react'; import { Button, Select, SelectItem, TextInput } from '@carbon/react'; import { useParams } from 'react-router-dom'; import { FormField } from '../interfaces'; -import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers'; +import { + modifyProcessIdentifierForPathParam, + slugifyString, + underscorizeString, +} from '../helpers'; import HttpService from '../services/HttpService'; export default function JsonSchemaFormBuilder() { @@ -75,7 +79,7 @@ export default function JsonSchemaFormBuilder() { formFieldIdHasBeenUpdatedByUser ); if (!formFieldIdHasBeenUpdatedByUser) { - setFormFieldId(slugifyString(newFormFieldTitle)); + setFormFieldId(underscorizeString(newFormFieldTitle)); } setFormFieldTitle(newFormFieldTitle); }; diff --git a/spiffworkflow-frontend/src/routes/MyTasks.tsx b/spiffworkflow-frontend/src/routes/MyTasks.tsx index 4c1cbc9bf..3daaaef6a 100644 --- a/spiffworkflow-frontend/src/routes/MyTasks.tsx +++ b/spiffworkflow-frontend/src/routes/MyTasks.tsx @@ -2,6 +2,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore import { Button, Table } from '@carbon/react'; import { Link, useSearchParams } from 'react-router-dom'; +import { Notification } from '../components/Notification'; import PaginationForTable from '../components/PaginationForTable'; import { getPageInfoFromSearchParams, @@ -51,20 +52,19 @@ export default function MyTasks() { const processInstanceRunResultTag = () => { if (processInstance) { return ( -
-

- Process Instance {processInstance.id} kicked off ( - - view - - ). -

-
+ setProcessInstance(null)} + > + + view + + ); } return null; diff --git a/spiffworkflow-frontend/src/routes/ProcessGroupList.tsx b/spiffworkflow-frontend/src/routes/ProcessGroupList.tsx index d9ceaf597..7dee4f203 100644 --- a/spiffworkflow-frontend/src/routes/ProcessGroupList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessGroupList.tsx @@ -39,7 +39,7 @@ export default function ProcessGroupList() { }; // for search box HttpService.makeCallToBackend({ - path: `/process-models?per_page=1000&recursive=true`, + path: `/process-models?per_page=1000&recursive=true&include_parent_groups=true`, successCallback: processResultForProcessModels, }); }, [searchParams]); diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx index 51c099815..a18f48c80 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx @@ -67,6 +67,7 @@ export default function ProcessInstanceList({ variant }: OwnProps) { { navigate('/admin/process-instances/for-me'); }} @@ -76,6 +77,7 @@ export default function ProcessInstanceList({ variant }: OwnProps) { { navigate('/admin/process-instances/all'); }} diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 37ef5519c..b4a4f683a 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -45,7 +45,7 @@ export default function ProcessInstanceLogList() { const rowToUse = row as any; return ( - {rowToUse.id} + {rowToUse.id} {rowToUse.message} {rowToUse.bpmn_task_name} {isDetailedView && ( @@ -114,6 +114,7 @@ export default function ProcessInstanceLogList() { { searchParams.set('detailed', 'false'); setSearchParams(searchParams); @@ -123,6 +124,7 @@ export default function ProcessInstanceLogList() { { searchParams.set('detailed', 'true'); setSearchParams(searchParams); diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx index 906fb3142..b753d3074 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx @@ -31,9 +31,7 @@ export default function ProcessInstanceReportList() { return ( - + {rowToUse.identifier} diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index e4e1ffa23..678ebdf2a 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -25,6 +25,7 @@ import { ButtonSet, Tag, Modal, + Dropdown, Stack, // @ts-ignore } from '@carbon/react'; @@ -41,6 +42,7 @@ import ErrorContext from '../contexts/ErrorContext'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { PermissionsToCheck, + ProcessData, ProcessInstance, ProcessInstanceTask, } from '../interfaces'; @@ -62,9 +64,16 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const [tasksCallHadError, setTasksCallHadError] = useState(false); const [taskToDisplay, setTaskToDisplay] = useState(null); const [taskDataToDisplay, setTaskDataToDisplay] = useState(''); + const [processDataToDisplay, setProcessDataToDisplay] = + useState(null); const [editingTaskData, setEditingTaskData] = useState(false); + const [selectingEvent, 
setSelectingEvent] = useState(false); + const [eventToSend, setEventToSend] = useState({}); + const [eventPayload, setEventPayload] = useState('{}'); + const [eventTextEditorEnabled, setEventTextEditorEnabled] = + useState(false); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const unModifiedProcessModelId = unModifyProcessIdentifierForPathParam( `${params.process_model_id}` @@ -78,15 +87,18 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { : targetUris.processInstanceTaskListForMePath; const permissionRequestData: PermissionsToCheck = { - [targetUris.messageInstanceListPath]: ['GET'], - [taskListPath]: ['GET'], - [targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'], - [targetUris.processInstanceActionPath]: ['DELETE'], - [targetUris.processInstanceLogListPath]: ['GET'], - [targetUris.processModelShowPath]: ['PUT'], [`${targetUris.processInstanceResumePath}`]: ['POST'], [`${targetUris.processInstanceSuspendPath}`]: ['POST'], [`${targetUris.processInstanceTerminatePath}`]: ['POST'], + [targetUris.processInstanceResetPath]: ['POST'], + [targetUris.messageInstanceListPath]: ['GET'], + [targetUris.processInstanceActionPath]: ['DELETE'], + [targetUris.processInstanceLogListPath]: ['GET'], + [targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'], + [targetUris.processInstanceSendEventPath]: ['POST'], + [targetUris.processInstanceCompleteTaskPath]: ['POST'], + [targetUris.processModelShowPath]: ['PUT'], + [taskListPath]: ['GET'], }; const { ability, permissionsLoaded } = usePermissionFetcher( permissionRequestData @@ -250,6 +262,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return spiffStepLink(, 1); }; + const resetProcessInstance = () => { + HttpService.makeCallToBackend({ + path: `${targetUris.processInstanceResetPath}/${currentSpiffStep()}`, + successCallback: refreshPage, + httpMethod: 'POST', + }); + }; + const getInfoTag = () => { if (!processInstance) { return null; @@ -411,16 +431,50 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } }; + const handleProcessDataDisplayClose = () => { + setProcessDataToDisplay(null); + }; + + const processDataDisplayArea = () => { + if (processDataToDisplay) { + return ( + +

Data Object: {processDataToDisplay.process_data_identifier}

+
+

Value:

+
{JSON.stringify(processDataToDisplay.process_data_value)}
+
+ ); + } + return null; + }; + + const handleProcessDataShowResponse = (processData: ProcessData) => { + setProcessDataToDisplay(processData); + }; + const handleClickedDiagramTask = ( shapeElement: any, bpmnProcessIdentifiers: any ) => { - if (tasks) { - const matchingTask: any = tasks.find( - (task: any) => + if (shapeElement.type === 'bpmn:DataObjectReference') { + const dataObjectIdentifer = shapeElement.businessObject.dataObjectRef.id; + HttpService.makeCallToBackend({ + path: `/process-data/${params.process_model_id}/${params.process_instance_id}/${dataObjectIdentifer}`, + httpMethod: 'GET', + successCallback: handleProcessDataShowResponse, + }); + } else if (tasks) { + const matchingTask: any = tasks.find((task: any) => { + return ( task.name === shapeElement.id && bpmnProcessIdentifiers.includes(task.process_identifier) - ); + ); + }); if (matchingTask) { setTaskToDisplay(matchingTask); initializeTaskDataToDisplay(matchingTask); @@ -471,10 +525,63 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const cancelEditingTaskData = () => { + const canSendEvent = (task: any) => { + // We actually could allow this for any waiting events + const taskTypes = ['Event Based Gateway']; + return ( + processInstance && + processInstance.status === 'waiting' && + ability.can('POST', targetUris.processInstanceSendEventPath) && + taskTypes.filter((t) => t === task.type).length > 0 && + task.state === 'WAITING' && + showingLastSpiffStep() + ); + }; + + const canCompleteTask = (task: any) => { + return ( + processInstance && + processInstance.status === 'suspended' && + ability.can('POST', targetUris.processInstanceCompleteTaskPath) && + task.state === 'READY' && + showingLastSpiffStep() + ); + }; + + const canResetProcess = (task: any) => { + return ( + ability.can('POST', targetUris.processInstanceResetPath) && + processInstance && + processInstance.status === 'suspended' && + task.state === 'READY' && + !showingLastSpiffStep() + ); + }; + + const getEvents = (task: any) => { + const handleMessage = (eventDefinition: any) => { + if (eventDefinition.typename === 'MessageEventDefinition') { + const newEvent = eventDefinition; + delete newEvent.message_var; + newEvent.payload = {}; + return newEvent; + } + return eventDefinition; + }; + if (task.event_definition && task.event_definition.event_definitions) + return task.event_definition.event_definitions.map((e: any) => + handleMessage(e) + ); + if (task.event_definition) return [handleMessage(task.event_definition)]; + return []; + }; + + const cancelUpdatingTask = () => { setEditingTaskData(false); + setSelectingEvent(false); initializeTaskDataToDisplay(taskToDisplay); - setErrorMessage(null); + setEventPayload('{}'); + setErrorObject(null); }; const taskDataStringToObject = (dataString: string) => { @@ -490,7 +597,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const saveTaskDataFailure = (result: any) => { - setErrorMessage({ message: result.message }); + setErrorObject({ message: result.message }); }; const saveTaskData = () => { @@ -498,12 +605,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return; } - setErrorMessage(null); + setErrorObject(null); // taskToUse is copy of taskToDisplay, with taskDataToDisplay in data attribute const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; HttpService.makeCallToBackend({ - path: `/task-data/${modifiedProcessModelId}/${params.process_instance_id}/${taskToUse.id}`, + path: 
`${targetUris.processInstanceTaskListDataPath}/${taskToUse.id}`, httpMethod: 'PUT', successCallback: saveTaskDataResult, failureCallback: saveTaskDataFailure, @@ -513,7 +620,30 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }); }; - const taskDataButtons = (task: any) => { + const sendEvent = () => { + if ('payload' in eventToSend) + eventToSend.payload = JSON.parse(eventPayload); + HttpService.makeCallToBackend({ + path: `/send-event/${modifiedProcessModelId}/${params.process_instance_id}`, + httpMethod: 'POST', + successCallback: saveTaskDataResult, + failureCallback: saveTaskDataFailure, + postBody: eventToSend, + }); + }; + + const completeTask = (execute: boolean) => { + const taskToUse: any = taskToDisplay; + HttpService.makeCallToBackend({ + path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToUse.id}`, + httpMethod: 'POST', + successCallback: saveTaskDataResult, + failureCallback: saveTaskDataFailure, + postBody: { execute }, + }); + }; + + const taskDisplayButtons = (task: any) => { const buttons = []; if ( @@ -542,22 +672,36 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); } - if (canEditTaskData(task)) { - if (editingTaskData) { - buttons.push( - - ); - buttons.push( - - ); - } else { + if (editingTaskData) { + buttons.push( + + ); + buttons.push( + + ); + } else if (selectingEvent) { + buttons.push( + + ); + buttons.push( + + ); + } else { + if (canEditTaskData(task)) { buttons.push( + ); + buttons.push( + + ); + } + if (canSendEvent(task)) { + buttons.push( + + ); + } + if (canResetProcess(task)) { + buttons.push( + + ); + } } return buttons; @@ -586,8 +768,42 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const taskDataDisplayArea = () => { + const eventSelector = (candidateEvents: any) => { + const editor = ( + setEventPayload(value || '{}')} + options={{ readOnly: !eventTextEditorEnabled }} + /> + ); + return selectingEvent ? ( + + item.name || item.label || item.typename} + onChange={(value: any) => { + setEventToSend(value.selectedItem); + setEventTextEditorEnabled( + value.selectedItem.typename === 'MessageEventDefinition' + ); + }} + /> + {editor} + + ) : ( + taskDataContainer() + ); + }; + + const taskUpdateDisplayArea = () => { const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; + const candidateEvents: any = getEvents(taskToUse); if (taskToDisplay) { return ( {taskToUse.name} ({taskToUse.type}): {taskToUse.state} - {taskDataButtons(taskToUse)} + {taskDisplayButtons(taskToUse)} - {taskDataContainer()} + {selectingEvent + ? eventSelector(candidateEvents) + : taskDataContainer()} ); } @@ -686,7 +904,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
{getInfoTag()}
-          {taskDataDisplayArea()}
+          {taskUpdateDisplayArea()}
+          {processDataDisplayArea()}
           {stepsElement()}
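Note on the send-event flow above: getEvents() normalizes a task's event definitions (clearing message_var and attaching an empty payload for message events), and sendEvent() POSTs the selected definition back to the backend. Below is a minimal usage sketch, assuming HttpService resolves paths against the backend API root; the ids and payload are illustrative only, not taken from this changeset.

// Hypothetical usage of the new send-event endpoint (sketch only).
import HttpService from '../services/HttpService';

const eventToSend = {
  typename: 'MessageEventDefinition', // the one typename getEvents() rewrites
  payload: { approved: true }, // would normally come from the payload editor
};

HttpService.makeCallToBackend({
  path: '/send-event/some-group:some-model/42', // modified model id + instance id
  httpMethod: 'POST',
  postBody: eventToSend,
  successCallback: (result: any) => console.log('event sent', result),
});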
diff --git a/spiffworkflow-frontend/src/routes/ProcessModelEditDiagram.tsx b/spiffworkflow-frontend/src/routes/ProcessModelEditDiagram.tsx
(null);
   const [processSearchElement, setProcessSearchElement] = useState(null);
   const [processes, setProcesses] = useState([]);
+  const [displaySaveFileMessage, setDisplaySaveFileMessage] =
+    useState(false);

   const handleShowMarkdownEditor = () => setShowMarkdownEditor(true);
@@ -70,10 +81,10 @@ export default function ProcessModelEditDiagram() {

   interface ScriptUnitTestResult {
     result: boolean;
-    context: object;
-    error: string;
-    line_number: number;
-    offset: number;
+    context?: object;
+    error?: string;
+    line_number?: number;
+    offset?: number;
   }

   const [currentScriptUnitTest, setCurrentScriptUnitTest] =
@@ -87,7 +98,7 @@ export default function ProcessModelEditDiagram() {
   const navigate = useNavigate();
   const [searchParams] = useSearchParams();

-  const setErrorMessage = (useContext as any)(ErrorContext)[1];
+  const setErrorObject = (useContext as any)(ErrorContext)[1];
   const [processModelFile, setProcessModelFile] = useState(
     null
   );
@@ -148,6 +159,7 @@ export default function ProcessModelEditDiagram() {
   };

   const navigateToProcessModelFile = (_result: any) => {
+    setDisplaySaveFileMessage(true);
     if (!params.file_name) {
       const fileNameWithExtension = `${newFileName}.${searchParams.get(
         'file_type'
       )}`;
@@ -158,10 +170,9 @@ export default function ProcessModelEditDiagram() {
     }
   };

-  const [displaySaveFileMessage, setDisplaySaveFileMessage] =
-    useState(false);
   const saveDiagram = (bpmnXML: any, fileName = params.file_name) => {
-    setErrorMessage(null);
+    setDisplaySaveFileMessage(false);
+    setErrorObject(null);
     setBpmnXmlForDiagramRendering(bpmnXML);

     let url = `/process-models/${modifiedProcessModelId}/files`;
@@ -187,7 +198,7 @@ export default function ProcessModelEditDiagram() {
     HttpService.makeCallToBackend({
       path: url,
       successCallback: navigateToProcessModelFile,
-      failureCallback: setErrorMessage,
+      failureCallback: setErrorObject,
       httpMethod,
       postBody: formData,
     });
@@ -195,7 +206,6 @@ export default function ProcessModelEditDiagram() {

     // after saving the file, make sure we null out newFileName
     // so it does not get used over the params
     setNewFileName('');
-    setDisplaySaveFileMessage(true);
   };

   const onDeleteFile = (fileName = params.file_name) => {
@@ -401,6 +411,13 @@ export default function ProcessModelEditDiagram() {
     };
   };

+  const jsonEditorOptions = () => {
+    return Object.assign(generalEditorOptions(), {
+      minimap: { enabled: false },
+      folding: true,
+    });
+  };
+
   const setPreviousScriptUnitTest = () => {
     resetUnitTextResult();
     const newScriptIndex = currentScriptUnitTestIndex - 1;
@@ -461,6 +478,21 @@ export default function ProcessModelEditDiagram() {

   const runCurrentUnitTest = () => {
     if (currentScriptUnitTest && scriptElement) {
+      let inputJson = '';
+      let expectedJson = '';
+      try {
+        inputJson = JSON.parse(currentScriptUnitTest.inputJson.value);
+        expectedJson = JSON.parse(
+          currentScriptUnitTest.expectedOutputJson.value
+        );
+      } catch (e) {
+        setScriptUnitTestResult({
+          result: false,
+          error: 'The JSON provided contains a formatting error.',
+        });
+        return;
+      }
+
       resetUnitTextResult();
       HttpService.makeCallToBackend({
         path: `/process-models/${modifiedProcessModelId}/script-unit-tests/run`,
@@ -469,37 +501,56 @@ export default function ProcessModelEditDiagram() {
         postBody: {
           bpmn_task_identifier: (scriptElement as any).id,
           python_script: scriptText,
-          input_json: JSON.parse(currentScriptUnitTest.inputJson.value),
-          expected_output_json: JSON.parse(
-            currentScriptUnitTest.expectedOutputJson.value
-          ),
+          input_json: inputJson,
+          expected_output_json: expectedJson,
         },
       });
     }
   };

   const unitTestFailureElement = () => {
-    if (
-      scriptUnitTestResult &&
-      scriptUnitTestResult.result === false &&
-      !scriptUnitTestResult.line_number
-    ) {
-      let errorStringElement = null;
-      if (scriptUnitTestResult.error) {
-        errorStringElement = (
-
-            Received error when running script:{' '}
-            {JSON.stringify(scriptUnitTestResult.error)}
-
-        );
-      }
-      let errorContextElement = null;
+    if (scriptUnitTestResult && scriptUnitTestResult.result === false) {
+      let errorObject = '';
       if (scriptUnitTestResult.context) {
+        errorObject = 'Unexpected result. Please see the comparison below.';
+      } else if (scriptUnitTestResult.line_number) {
+        errorObject = `Error encountered running the script. Please check the code around line ${scriptUnitTestResult.line_number}`;
+      } else {
+        errorObject = `Error encountered running the script. ${JSON.stringify(
+          scriptUnitTestResult.error
+        )}`;
+      }
+      let errorStringElement = {errorObject};
+
+      let errorContextElement = null;
+
+      if (scriptUnitTestResult.context) {
+        errorStringElement = (
+          Unexpected result. Please see the comparison below.
+        );
+        let outputJson = '{}';
+        if (currentScriptUnitTest) {
+          outputJson = JSON.stringify(
+            JSON.parse(currentScriptUnitTest.expectedOutputJson.value),
+            null,
+            ' '
+          );
+        }
+        const contextJson = JSON.stringify(
+          scriptUnitTestResult.context,
+          null,
+          ' '
+        );
         errorContextElement = (
-
-            Received unexpected output:{' '}
-            {JSON.stringify(scriptUnitTestResult.context)}
-
+
         );
       }
       return (
@@ -543,19 +594,35 @@ export default function ProcessModelEditDiagram() {
       );
     }

+    let inputJson = currentScriptUnitTest.inputJson.value;
+    let outputJson = currentScriptUnitTest.expectedOutputJson.value;
+    try {
+      inputJson = JSON.stringify(
+        JSON.parse(currentScriptUnitTest.inputJson.value),
+        null,
+        ' '
+      );
+      outputJson = JSON.stringify(
+        JSON.parse(currentScriptUnitTest.expectedOutputJson.value),
+        null,
+        ' '
+      );
+    } catch (e) {
+      // Attempting to format the json failed -- it's invalid.
+    }
+
     return (
-
      );
     }
     return null;
   };

-  const scriptEditor = () => {
+  const scriptEditor = () => {
+    return (
+
+    );
+  };
+
+  const scriptEditorAndTests = () => {
     let scriptName = '';
     if (scriptElement) {
       scriptName = (scriptElement as any).di.bpmnElement.name;
     }
-    return (
-
-        {scriptUnitTestEditorElement()}
+    return (
+
+
+          Script Editor
+          Unit Tests
+
+
+            {scriptEditor()}
+            {scriptUnitTestEditorElement()}
+
+
     );
   };
@@ -878,7 +954,7 @@ export default function ProcessModelEditDiagram() {
       {saveFileMessage()}
       {appropriateEditor()}
       {newFileNameBox()}
-      {scriptEditor()}
+      {scriptEditorAndTests()}
       {markdownEditor()}
       {processModelSelector()}
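The unit-test changes above apply one pattern twice: parse user-supplied JSON inside try/catch before using it, and pretty-print it via a JSON.parse/JSON.stringify round trip. A standalone sketch of that pattern follows; prettyPrintJson is a hypothetical helper name, since the diff inlines this logic rather than extracting it.

// Validate-then-pretty-print, as used by runCurrentUnitTest and the unit
// test editor above. Returns null when the input is not valid JSON.
const prettyPrintJson = (raw: string): string | null => {
  try {
    // JSON.parse throws on malformed input; the ' ' indent argument
    // mirrors the one passed to JSON.stringify in the diff.
    return JSON.stringify(JSON.parse(raw), null, ' ');
  } catch (e) {
    return null;
  }
};

// Usage: guard a test run the same way runCurrentUnitTest now does.
if (prettyPrintJson('{"a": 1,}') === null) {
  console.error('The JSON provided contains a formatting error.');
}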
diff --git a/spiffworkflow-frontend/src/routes/ProcessModelNewExperimental.tsx b/spiffworkflow-frontend/src/routes/ProcessModelNewExperimental.tsx
new file mode 100644
index 000000000..af8be8226
--- /dev/null
+++ b/spiffworkflow-frontend/src/routes/ProcessModelNewExperimental.tsx
@@ -0,0 +1,73 @@
+import { useState } from 'react';
+import { useNavigate, useParams } from 'react-router-dom';
+// @ts-ignore
+import { TextArea, Button, Form } from '@carbon/react';
+import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
+import { ProcessModel } from '../interfaces';
+import { modifyProcessIdentifierForPathParam } from '../helpers';
+import HttpService from '../services/HttpService';
+
+export default function ProcessModelNewExperimental() {
+  const params = useParams();
+  const navigate = useNavigate();
+  const [processModelDescriptiveText, setProcessModelDescriptiveText] =
+    useState('');
+
+  const helperText =
+    'Create a bug tracker process model with a bug-details form that collects summary, description, and priority';
+
+  const navigateToProcessModel = (result: ProcessModel) => {
+    if ('id' in result) {
+      const modifiedProcessModelPathFromResult =
+        modifyProcessIdentifierForPathParam(result.id);
+      navigate(`/admin/process-models/${modifiedProcessModelPathFromResult}`);
+    }
+  };
+
+  const handleFormSubmission = (event: any) => {
+    event.preventDefault();
+    HttpService.makeCallToBackend({
+      path: `/process-models-natural-language/${params.process_group_id}`,
+      successCallback: navigateToProcessModel,
+      httpMethod: 'POST',
+      postBody: { natural_language_text: processModelDescriptiveText },
+    });
+  };
+
+  const ohYeeeeaah = () => {
+    setProcessModelDescriptiveText(helperText);
+  };
+
+  return (
+    <>
+
+      {/* eslint-disable-next-line jsx-a11y/no-noninteractive-element-interactions */}
+

+ Add Process Model +

+
+
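ProcessModelNewExperimental posts free text to the new natural-language backend route and navigates to the resulting model. For manual testing outside React, the equivalent raw request would look roughly like the sketch below; the localhost:8000 host and /v1.0 prefix are taken from the backend URLs elsewhere in this changeset, and auth headers (required in practice) are omitted.

// Hypothetical direct call to the natural-language endpoint (sketch only).
const createModelFromText = async (processGroupId: string, text: string) => {
  const response = await fetch(
    `http://localhost:8000/v1.0/process-models-natural-language/${processGroupId}`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ natural_language_text: text }),
    }
  );
  return response.json(); // expected to include the new model's id
};

createModelFromText('my-group', 'Create a bug tracker process model').then(
  (model) => console.log(model.id)
);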