Merge branch 'main' into feature/process-navigation

Elizabeth Esswein 2022-12-29 12:03:28 -05:00
commit f24d751450
110 changed files with 3338 additions and 1400 deletions

@ -10,9 +10,9 @@ services:
environment:
- MYSQL_DATABASE=spiffworkflow_backend_development
- MYSQL_ROOT_PASSWORD=my-secret-pw
- MYSQL_TCP_PORT=7003
- MYSQL_TCP_PORT=8003
ports:
- "7003"
- "8003"
healthcheck:
test: mysql --user=root --password=my-secret-pw -e 'select 1' spiffworkflow_backend_development
interval: 10s
@ -30,12 +30,12 @@ services:
- SPIFFWORKFLOW_BACKEND_ENV=development
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=super_secret_key
- OPEN_ID_SERVER_URL=http://localhost:7000/openid
- SPIFFWORKFLOW_FRONTEND_URL=http://localhost:7001
- SPIFFWORKFLOW_BACKEND_URL=http://localhost:7000
- SPIFFWORKFLOW_BACKEND_PORT=7000
- OPEN_ID_SERVER_URL=http://localhost:8000/openid
- SPIFFWORKFLOW_FRONTEND_URL=http://localhost:8001
- SPIFFWORKFLOW_BACKEND_URL=http://localhost:8000
- SPIFFWORKFLOW_BACKEND_PORT=8000
- SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
- SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:7003/spiffworkflow_backend_development
- SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:8003/spiffworkflow_backend_development
- BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
- SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=false
- SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=example.yml
@ -43,12 +43,12 @@ services:
- OPEN_ID_CLIENT_ID=spiffworkflow-backend
- OPEN_ID_CLIENT_SECRET_KEY=my_open_id_secret_key
ports:
- "7000:7000"
- "8000:8000"
volumes:
- ./process_models:/app/process_models
- ./log:/app/log
healthcheck:
test: curl localhost:7000/v1.0/status --fail
test: curl localhost:8000/v1.0/status --fail
interval: 10s
timeout: 5s
retries: 20
@ -58,9 +58,9 @@ services:
image: ghcr.io/sartography/spiffworkflow-frontend
environment:
- APPLICATION_ROOT=/
- PORT0=7001
- PORT0=8001
ports:
- "7001:7001"
- "8001:8001"
spiffworkflow-connector:
container_name: spiffworkflow-connector
@ -69,10 +69,11 @@ services:
- FLASK_ENV=${FLASK_ENV:-development}
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
- CONNECTOR_PROXY_PORT=8004
ports:
- "7004:7004"
- "8004:8004"
healthcheck:
test: curl localhost:7004/liveness --fail
test: curl localhost:8004/liveness --fail
interval: 10s
timeout: 5s
retries: 20

flask-bpmn/poetry.lock (generated)

@ -813,22 +813,6 @@ category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "libcst"
version = "0.4.3"
description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
pyyaml = ">=5.2"
typing-extensions = ">=3.7.4.2"
typing-inspect = ">=0.4.0"
[package.extras]
dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"]
[[package]]
name = "livereload"
version = "2.6.3"
@ -905,18 +889,6 @@ category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "monkeytype"
version = "22.2.0"
description = "Generating type annotations from sampled production types"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
libcst = ">=0.3.7"
mypy-extensions = "*"
[[package]]
name = "mypy"
version = "0.991"
@ -1504,7 +1476,7 @@ test = ["pytest"]
[[package]]
name = "SpiffWorkflow"
version = "1.2.1"
description = ""
description = "A workflow framework and BPMN/DMN Processor"
category = "main"
optional = false
python-versions = "*"
@ -1520,7 +1492,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "025bc30f27366e06dd1286b7563e4b1cb04c1c46"
resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994"
[[package]]
name = "sqlalchemy"
@ -1627,18 +1599,6 @@ category = "main"
optional = false
python-versions = ">=3.7"
[[package]]
name = "typing-inspect"
version = "0.7.1"
description = "Runtime inspection utilities for typing module."
category = "dev"
optional = false
python-versions = "*"
[package.dependencies]
mypy-extensions = ">=0.3.0"
typing-extensions = ">=3.7.4"
[[package]]
name = "unidecode"
version = "1.3.4"
@ -1770,7 +1730,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "6dfda037ebb3024834a45670108756a3057fff1b6fb5b916d222d3a162509b7d"
content-hash = "45cac5741fa47e44710f5aae6dfdb4636fc4d60df2d6aba467052fdd5199e791"
[metadata.files]
alabaster = [
@ -2234,32 +2194,6 @@ lazy-object-proxy = [
{file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"},
{file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"},
]
libcst = [
{file = "libcst-0.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bea98a8be2b1725784ae01e89519121eba7d81280dcbee40ae03ececd7277cf3"},
{file = "libcst-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d9191c764645dddf94d49885e590433fa0ee6d347b07eec86566786e6d2ada5"},
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f22e9787e44304e7cd9744e543602ab2c1bca8b922cb6237ea08d9a0be3fdd"},
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff147dd77b6ea72e4f2f0abfcd1be11a3108c28cb65e6da666c0b77142033f7c"},
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d744d4a6301c75322f1d88365dccfe402a51e724583a2edc4cba474462cc9419"},
{file = "libcst-0.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:ed0f15545eddfdd6270069ce0b2d4c253298817bd676a1a6adddaa1d66c7e28b"},
{file = "libcst-0.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6f57056a743853c01bbd21bfd96c2a1b4c317bbc66920f5f2c9999b3dca7233"},
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3d33da8f9b088e118bfc6ecacdd627ac237baeb490f4d7a383af4df4ea4f82"},
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df5f51a837fc10cdbf5c61acb467f6c15d5f9ca1d94a84a6a29c4f20ce7b437e"},
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f744f60057c8998b856d9baf28765c65574992f4a49830ca350010fc31f4eac4"},
{file = "libcst-0.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:88ab371aab82f7241448e263ec42abced649a77cdd21df960268e6df70b3f3f7"},
{file = "libcst-0.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:826ea5f10a84625db861ccf35946317f4f29e575261e44c0cd6c24c4dde5c2bb"},
{file = "libcst-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab5b23796ce66303398bb7b2d27bcb17d2416dacd3d00229c961aed87d79a3b"},
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afc793c95af79e5adc5905713ccddff034d0de3e3da748424b722edf890227de"},
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c982387b8e23ad18efbd0287004924931a0b05c91ed5630453faf224bb0b185"},
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4c25aca45df5f86a6a1c8c219e8c7a90acdaef02b53eb01eafa563381cb0ce"},
{file = "libcst-0.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1a395129ecf6c6ce429427f34100ccd99f35898a98187764a4559d9f92166cd0"},
{file = "libcst-0.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ca00819affafccb02b2582ec47706712b995c9887cad02bb8efe94a066830f37"},
{file = "libcst-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:231a9ca446570f9b63d8c2c6dbf6c796fb939a5e4ef9dc0dd9304a21a6c0da16"},
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b08e7a56950479c856183ad6fdf0a21df028d6732e1d19822ec1593e32f700ca"},
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cb70e7e5118234e75d309fcf04931e20f282f16c80dda464fc1b88ef02e52e4"},
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c8c00b24ab39facff463b18b9abc8df7dd063ae0ce9fe2e78e199c9a8572e37"},
{file = "libcst-0.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:28f35b9a21b2f8982a8ed3f53b1fdbc5435252409d34d061a3229dc4b413b8c7"},
{file = "libcst-0.4.3.tar.gz", hash = "sha256:f79ab61287505d97ed57ead14b78777f48cd6ec5339ca4978987e4c35957a465"},
]
livereload = [
{file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
]
@ -2389,10 +2323,6 @@ mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
monkeytype = [
{file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"},
{file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
]
mypy = [
{file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
{file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
@ -2808,11 +2738,6 @@ typing-extensions = [
{file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
{file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
]
typing-inspect = [
{file = "typing_inspect-0.7.1-py2-none-any.whl", hash = "sha256:b1f56c0783ef0f25fb064a01be6e5407e54cf4a4bf4f3ba3fe51e0bd6dcea9e5"},
{file = "typing_inspect-0.7.1-py3-none-any.whl", hash = "sha256:3cd7d4563e997719a710a3bfe7ffb544c6b72069b6812a02e9b414a8fa3aaa6b"},
{file = "typing_inspect-0.7.1.tar.gz", hash = "sha256:047d4097d9b17f46531bf6f014356111a1b6fb821a24fe7ac909853ca2a782aa"},
]
unidecode = [
{file = "Unidecode-1.3.4-py3-none-any.whl", hash = "sha256:afa04efcdd818a93237574791be9b2817d7077c25a068b00f8cff7baa4e59257"},
{file = "Unidecode-1.3.4.tar.gz", hash = "sha256:8e4352fb93d5a735c788110d2e7ac8e8031eb06ccbfe8d324ab71735015f9342"},

@ -175,6 +175,10 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
if not isinstance(exception, ApiError) or exception.error_code != "invalid_token":
id = capture_exception(exception)
if isinstance(exception, ApiError):
current_app.logger.info(
f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}")
organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG")
project_slug = current_app.config.get("SENTRY_PROJECT_SLUG")
if organization_slug and project_slug:

@ -1,7 +1,7 @@
.mypy_cache/
/.idea/
/.coverage
/.coverage.*
.coverage.*
/.nox/
/.python-version
/.pytype/

@ -7,7 +7,8 @@ def main() -> None:
"""Main."""
app = get_hacked_up_app_for_script()
with app.app_context():
AuthorizationService.delete_all_permissions_and_recreate()
AuthorizationService.delete_all_permissions()
AuthorizationService.import_permissions_from_yaml_file()
if __name__ == "__main__":

@ -1,5 +1,4 @@
"""Get the bpmn process json for a given process instance id and store it in /tmp."""
#!/usr/bin/env python
import os
import sys
@ -18,15 +17,17 @@ def main(process_instance_id: str):
id=process_instance_id
).first()
file_path = f"/tmp/{process_instance_id}_bpmn_json.json"
if not process_instance:
raise Exception(
f"Could not find a process instance with id: {process_instance_id}"
)
with open(
f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8"
file_path, "w", encoding="utf-8"
) as f:
f.write(process_instance.bpmn_json)
print(f"Saved to {file_path}")
if len(sys.argv) < 2:

@ -0,0 +1,11 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
set -x
mysql -uroot spiffworkflow_backend_development -e 'select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa join principal p on p.id = pa.principal_id join `group` g on g.id = p.group_id join permission_target pt on pt.id = pa.permission_target_id;'

@ -61,3 +61,7 @@ for task in $tasks; do
done
SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
fi

@ -426,6 +426,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "admin@spiffworkflow.org",
"credentials" : [ {
"id" : "ef435043-ef0c-407a-af5b-ced13182a408",
"type" : "password",
@ -446,6 +447,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "alex@sartography.com",
"credentials" : [ {
"id" : "81a61a3b-228d-42b3-b39a-f62d8e7f57ca",
"type" : "password",
@ -465,6 +467,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "amir@status.im",
"credentials" : [ {
"id" : "e589f3ad-bf7b-4756-89f7-7894c03c2831",
"type" : "password",
@ -484,6 +487,9 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "ciadmin1@spiffworkflow.org",
"credentials" : [ {
"id" : "111b5ea1-c2ab-470a-a16b-2373bc94de7a",
"type" : "password",
@ -499,28 +505,6 @@
},
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "56457e8f-47c6-4f9f-a72b-473dea5edfeb",
"createdTimestamp" : 1657139955336,
"username" : "ciuser1",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"credentials" : [ {
"id" : "762f36e9-47af-44da-8520-cf09d752497a",
"type" : "password",
"createdDate" : 1657139966468,
"secretData" : "{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"clientRoles" : {
"spiffworkflow-backend" : [ "uma_protection" ]
},
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "d58b61cc-a77e-488f-a427-05f4e0572e20",
"createdTimestamp" : 1669132945413,
@ -530,6 +514,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "core@status.im",
"credentials" : [ {
"id" : "ee80092b-8ee6-4699-8492-566e088b48f5",
"type" : "password",
@ -550,6 +535,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "dan@sartography.com",
"credentials" : [ {
"id" : "d517c520-f500-4542-80e5-7144daef1e32",
"type" : "password",
@ -569,6 +555,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "daniel@sartography.com",
"credentials" : [ {
"id" : "f240495c-265b-42fc-99db-46928580d07d",
"type" : "password",
@ -588,6 +575,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "elizabeth@sartography.com",
"credentials" : [ {
"id" : "ae951ec8-9fc9-4f1b-b340-bbbe463ae5c2",
"type" : "password",
@ -609,6 +597,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "fin@status.im",
"credentials" : [ {
"id" : "2379940c-98b4-481a-b629-0bd1a4e91acf",
"type" : "password",
@ -631,6 +620,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "fin1@status.im",
"credentials" : [ {
"id" : "96216746-ff72-454e-8288-232428d10b42",
"type" : "password",
@ -651,6 +641,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "finance_user1@status.im",
"credentials" : [ {
"id" : "f14722ec-13a7-4d35-a4ec-0475d405ae58",
"type" : "password",
@ -670,6 +661,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "harmeet@status.im",
"credentials" : [ {
"id" : "89c26090-9bd3-46ac-b038-883d02e3f125",
"type" : "password",
@ -691,6 +683,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "j@status.im",
"credentials" : [ {
"id" : "e71ec785-9133-4b7d-8015-1978379af0bb",
"type" : "password",
@ -711,6 +704,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jakub@status.im",
"credentials" : [ {
"id" : "ce141fa5-b8d5-4bbe-93e7-22e7119f97c2",
"type" : "password",
@ -730,6 +724,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jarrad@status.im",
"credentials" : [ {
"id" : "113e0343-1069-476d-83f9-21d98edb9cfa",
"type" : "password",
@ -749,6 +744,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jason@sartography.com",
"credentials" : [ {
"id" : "40abf32e-f0cc-4a17-8231-1a69a02c1b0b",
"type" : "password",
@ -768,6 +764,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "jon@sartography.com",
"credentials" : [ {
"id" : "8b520e01-5b9b-44ab-9ee8-505bd0831a45",
"type" : "password",
@ -787,6 +784,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "kb@sartography.com",
"credentials" : [ {
"id" : "2c0be363-038f-48f1-86d6-91fdd28657cf",
"type" : "password",
@ -808,6 +806,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "lead@status.im",
"credentials" : [ {
"id" : "96e836a4-1a84-45c5-a9ed-651b0c90195e",
"type" : "password",
@ -830,6 +829,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "lead1@status.im",
"credentials" : [ {
"id" : "4e17388b-6c44-44e1-b20a-a873c0feb9a8",
"type" : "password",
@ -850,6 +850,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "manuchehr@status.im",
"credentials" : [ {
"id" : "07dabf55-b5d3-4f98-abba-3334086ecf5e",
"type" : "password",
@ -869,6 +870,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "mike@sartography.com",
"credentials" : [ {
"id" : "1ed375fb-0f1a-4c2a-9243-2477242cf7bd",
"type" : "password",
@ -887,7 +889,10 @@
"username" : "natalia",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"emailVerified" : true,
"firstName" : "",
"lastName" : "",
"email" : "natalia@sartography.com",
"credentials" : [ {
"id" : "b6aa9936-39cc-4931-bfeb-60e6753de5ba",
"type" : "password",
@ -907,6 +912,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "sasha@status.im",
"credentials" : [ {
"id" : "4a170af4-6f0c-4e7b-b70c-e674edf619df",
"type" : "password",
@ -926,6 +932,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "service-account@status.im",
"serviceAccountClientId" : "spiffworkflow-backend",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],
@ -943,6 +950,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "service-account-withauth@status.im",
"serviceAccountClientId" : "withAuth",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],
@ -2166,7 +2174,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ]
"allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@ -2184,7 +2192,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@ -2274,7 +2282,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
"id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3",
"id" : "76ae522e-7ab3-48dc-af76-9cb8069368a2",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",
@ -2296,7 +2304,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "4da99e29-371e-4f4b-a863-e5079f30a714",
"id" : "ddf80243-ec40-4c21-ae94-2967d841f84c",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",
@ -2325,7 +2333,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "d398c928-e201-4e8b-ab09-289bb351cd2e",
"id" : "4f075680-46b7-49eb-b94c-d7425f105cb9",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2347,7 +2355,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d",
"id" : "a0467c77-c3dc-4df6-acd2-c05ca13601ed",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2369,7 +2377,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "98013bc1-e4dd-41f7-9849-1f898143b944",
"id" : "07536fec-8d41-4c73-845f-ca85002022e0",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2391,7 +2399,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2",
"id" : "f123f912-71fb-4596-97f9-c0628a59413d",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",
@ -2413,7 +2421,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "2470e6f4-9a01-476a-9057-75d78e577182",
"id" : "03c26cc5-366b-462d-9297-b4016f8d7c57",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",
@ -2435,7 +2443,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9",
"id" : "1b4f474e-aa64-45cc-90f1-63504585d89c",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",
@ -2458,7 +2466,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "97c83e43-cba8-4d92-b108-9181bca07a1e",
"id" : "38024dd6-daff-45de-8782-06b07b7bfa56",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",
@ -2480,7 +2488,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "fbabd64c-20de-4b8c-bfd2-be6822572278",
"id" : "b7e30fca-e4ac-4886-a2e7-642fe2a27ee7",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@ -2516,7 +2524,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "0628a99f-b194-495d-8e54-cc4ca8684956",
"id" : "92e3571d-ac3e-4e79-a391-5315954e866f",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",
@ -2552,7 +2560,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a",
"id" : "5093dd2d-fe5d-4f41-a54d-03cd648d9b7f",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",
@ -2581,7 +2589,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa",
"id" : "95d2f1ff-6907-47ce-a93c-db462fe04844",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",
@ -2596,7 +2604,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9",
"id" : "27405ee8-5730-419c-944c-a7c67edd91ce",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",
@ -2619,7 +2627,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9",
"id" : "fce6d926-3a99-40ee-b79e-cae84493dbd8",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@ -2641,7 +2649,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a",
"id" : "75d93596-b7fb-4a2c-a780-e6a038e66fe9",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",
@ -2663,7 +2671,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03",
"id" : "04cdc1ac-c58d-4f8c-bc10-7d5e2bb99485",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",
@ -2679,7 +2687,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3",
"id" : "99593c1e-f2a5-4198-ad41-634694259110",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",
@ -2715,7 +2723,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828",
"id" : "7d53f026-b05e-4a9c-aba6-23b17826a4d4",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",
@ -2751,7 +2759,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "824fe757-cc5c-4e13-ab98-9a2132e10f5c",
"id" : "7ca17e64-f916-4d6c-91f0-815ec66f50e8",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@ -2767,13 +2775,13 @@
} ]
} ],
"authenticatorConfig" : [ {
"id" : "817a93da-29df-447f-ab05-cd9557e66745",
"id" : "9b71d817-b999-479d-97f8-07e39dd9e9fa",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
"id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878",
"id" : "f9f13ba1-6a17-436b-a80b-6ccc042f9fc2",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"

@ -1,8 +1,8 @@
"""empty message
Revision ID: b86f7cc3a74b
Revision ID: 907bcf0c3d75
Revises:
Create Date: 2022-12-19 16:20:27.715487
Create Date: 2022-12-28 13:52:13.030028
"""
from alembic import op
@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b86f7cc3a74b'
revision = '907bcf0c3d75'
down_revision = None
branch_labels = None
depends_on = None
@ -72,16 +72,15 @@ def upgrade():
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('uid', sa.String(length=50), nullable=True),
sa.Column('service', sa.String(length=50), nullable=False),
sa.Column('service', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('display_name', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('service', 'service_id', name='service_key'),
sa.UniqueConstraint('uid')
sa.UniqueConstraint('username')
)
op.create_table('message_correlation_property',
sa.Column('id', sa.Integer(), nullable=False),
@ -176,11 +175,20 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
)
op.create_table('user_group_assignment_waiting',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique')
)
op.create_table('human_task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('form_file_name', sa.String(length=50), nullable=True),
sa.Column('ui_form_file_name', sa.String(length=50), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
@ -193,6 +201,7 @@ def upgrade():
sa.Column('process_model_display_name', sa.String(length=255), nullable=True),
sa.Column('completed', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
@ -259,9 +268,6 @@ def upgrade():
sa.Column('spiff_step', sa.Integer(), nullable=False),
sa.Column('task_json', sa.JSON(), nullable=False),
sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id')
)
@ -310,6 +316,7 @@ def downgrade():
op.drop_table('message_correlation')
op.drop_index(op.f('ix_human_task_completed'), table_name='human_task')
op.drop_table('human_task')
op.drop_table('user_group_assignment_waiting')
op.drop_table('user_group_assignment')
op.drop_table('secret')
op.drop_table('refresh_token')

@ -654,7 +654,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "0f2d249d0e799bec912d46132e9ef9754fdacbd7"
resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"
[[package]]
name = "Flask-Cors"
@ -1851,7 +1851,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994"
resolved_reference = "5c4592801fea56ba2f0c56df467f759bcfe47b7e"
[[package]]
name = "SQLAlchemy"

@ -18,6 +18,7 @@ from werkzeug.exceptions import NotFound
import spiffworkflow_backend.load_database_models # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
openid_blueprint,
@ -117,7 +118,7 @@ def create_app() -> flask.app.Flask:
]
CORS(app, origins=origins_re, max_age=3600)
connexion_app.add_api("api.yml", base_path="/v1.0")
connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX)
mail = Mail(app)
app.config["MAIL_APP"] = mail

@ -174,7 +174,7 @@ paths:
items:
$ref: "#/components/schemas/ProcessModelCategory"
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_add
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_create
summary: Add process group
tags:
- Process Groups
@ -601,7 +601,7 @@ paths:
description: Specifies the identifier of a report to use, if any
schema:
type: integer
- name: group_identifier
- name: user_group_identifier
in: query
required: false
description: The identifier of the group to get the process instances for
@ -714,7 +714,7 @@ paths:
description: Specifies the identifier of a report to use, if any
schema:
type: integer
- name: group_identifier
- name: user_group_identifier
in: query
required: false
description: The identifier of the group to get the process instances for
@ -1328,7 +1328,7 @@ paths:
/tasks/for-my-groups:
parameters:
- name: group_identifier
- name: user_group_identifier
in: query
required: false
description: The identifier of the group to get the tasks for
@ -1439,7 +1439,7 @@ paths:
schema:
type: string
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update
summary: Update the task data for requested instance and task
tags:
- Process Instances
@ -1451,6 +1451,39 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
/process-data/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The modified id of an existing process model
schema:
type: string
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
- name: process_data_identifier
in: path
required: true
description: The identifier of the process data.
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_show
summary: Fetch the process data value.
tags:
- Data Objects
responses:
"200":
description: Fetch succeeded.
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
/send-event/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: modified_process_model_identifier
@ -1749,7 +1782,7 @@ paths:
schema:
type: integer
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.add_secret
operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_create
summary: Create a secret for a key and value
tags:
- Secrets
@ -1799,7 +1832,7 @@ paths:
schema:
$ref: "#/components/schemas/Secret"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_secret
operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_delete
summary: Delete an existing secret
tags:
- Secrets
@ -1811,7 +1844,7 @@ paths:
"404":
description: Secret does not exist
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.update_secret
operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_update
summary: Modify an existing secret
tags:
- Secrets

@ -42,6 +42,7 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
"""Load_config_file."""
try:
app.config.from_object(env_config_module)
print(f"loaded config: {env_config_module}")
except ImportStringError as exception:
if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
raise ModuleNotFoundError(
@ -62,6 +63,7 @@ def setup_config(app: Flask) -> None:
)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config.from_object("spiffworkflow_backend.config.default")
print("loaded config: default")
env_config_prefix = "spiffworkflow_backend.config."
if (
@ -69,6 +71,7 @@ def setup_config(app: Flask) -> None:
and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
):
load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
print("loaded config: terraform_deployed_environment")
env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
load_config_file(app, env_config_module)
@ -87,6 +90,12 @@ def setup_config(app: Flask) -> None:
"permissions",
app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
)
print(
f"set permissions file name config: {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
)
print(
f"set permissions file name full path: {app.config['PERMISSIONS_FILE_FULLPATH']}"
)
# unversioned (see .gitignore) config that can override everything and include secrets.
# src/spiffworkflow_backend/config/secrets.py

@ -6,3 +6,4 @@ GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-commit
GIT_USER_EMAIL = environ.get(
"GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml"

@ -1,13 +1,10 @@
groups:
admin:
users: [ciadmin1]
common-user:
users: [ciuser1]
users: [ciadmin1@spiffworkflow.org]
permissions:
admin:
groups: [admin, common-user]
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

@ -0,0 +1,151 @@
default_group: everybody
groups:
admin:
users:
[
admin@spiffworkflow.org,
jakub@status.im,
jarrad@status.im,
kb@sartography.com,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]
Finance Team:
users:
[
jakub@status.im,
amir@status.im,
jarrad@status.im,
sasha@status.im,
fin@status.im,
fin1@status.im,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]
demo:
users:
[
harmeet@status.im,
sasha@status.im,
manuchehr@status.im,
core@status.im,
fin@status.im,
fin1@status.im,
lead@status.im,
lead1@status.im,
]
test:
users:
[
natalia@sartography.com,
]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
# open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-models/*
# basic perms for everybody
read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /processes
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /service-tasks
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /user-groups/for-current-user
finance-admin:
groups: ["Finance Team"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-groups/manage-procurement:procurement:*
manage-revenue-streams-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["demo"]
users: []
allowed_permissions: [create]
uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
manage-revenue-streams-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
create-test-instances:
groups: ["test"]
users: []
allowed_permissions: [create, read]
uri: /process-instances/misc:test:*

@ -10,54 +10,54 @@ groups:
admin:
users:
[
admin,
jakub,
kb,
alex,
dan,
mike,
jason,
jarrad,
elizabeth,
jon,
admin@spiffworkflow.org,
jakub@status.im,
jarrad@status.im,
kb@sartography.com,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]
Finance Team:
users:
[
jakub,
alex,
dan,
mike,
jason,
amir,
jarrad,
elizabeth,
jon,
sasha,
fin,
fin1,
jakub@status.im,
amir@status.im,
jarrad@status.im,
sasha@status.im,
fin@status.im,
fin1@status.im,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]
demo:
users:
[
core,
fin,
fin1,
harmeet,
jason,
sasha,
manuchehr,
lead,
lead1
harmeet@status.im,
sasha@status.im,
manuchehr@status.im,
core@status.im,
fin@status.im,
fin1@status.im,
lead@status.im,
lead1@status.im,
]
core-contributor:
test:
users:
[
core,
harmeet,
natalia@sartography.com,
]
admin-ro:
@ -66,16 +66,12 @@ groups:
j,
]
test:
users: [natalia]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
admin-readonly:
groups: [admin-ro]
users: []
@ -85,121 +81,93 @@ permissions:
groups: [admin-ro]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*
uri: /process-instances/*
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/user-groups/for-current-user
# read all for everybody
# open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-groups/*
uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-models/*
uri: /process-models/*
# basic perms for everybody
read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/for-me/*
uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/reports/*
uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/processes
uri: /processes
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /service-tasks
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /user-groups/for-current-user
manage-procurement-admin:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:*
manage-procurement-admin-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement/*
manage-procurement-admin-models:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement:*
manage-procurement-admin-models-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement/*
manage-procurement-admin-instances:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement:*
manage-procurement-admin-instances-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement/*
finance-admin:
groups: ["Finance Team"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:procurement:*
uri: /process-groups/manage-procurement:procurement:*
manage-revenue-streams-instances:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
allowed_permissions: [create]
uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
allowed_permissions: [create]
uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
allowed_permissions: [create]
uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
manage-revenue-streams-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
create-test-instances:
groups: ["test"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:test:*
core1-admin-instances:
groups: ["core-contributor", "Finance Team"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form:*
core1-admin-instances-slash:
groups: ["core-contributor", "Finance Team"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
uri: /process-instances/misc:test:*

@ -2,14 +2,17 @@ default_group: everybody
users:
admin:
service: local_open_id
email: admin@spiffworkflow.org
password: admin
preferred_username: Admin
nelson:
service: local_open_id
email: nelson@spiffworkflow.org
password: nelson
preferred_username: Nelson
malala:
service: local_open_id
email: malala@spiffworkflow.org
password: malala
preferred_username: Malala
@ -18,17 +21,17 @@ groups:
admin:
users:
[
admin,
admin@spiffworkflow.org,
]
Education:
users:
[
malala
malala@spiffworkflow.org
]
President:
users:
[
nelson
nelson@spiffworkflow.org
]
permissions:
@ -44,45 +47,44 @@ permissions:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
uri: /tasks/*
# Everyone can see everything (all groups, and processes are visible)
read-all-process-groups:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-groups/*
uri: /process-groups/*
read-all-process-models:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-models/*
uri: /process-models/*
read-all-process-instance:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-instances/*
uri: /process-instances/*
read-process-instance-reports:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-instances/reports/*
uri: /process-instances/reports/*
processes-read:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/processes
# Members of the Education group can change they processes work.
uri: /processes
# Members of the Education group can change the processes under "education".
education-admin:
groups: ["Education", "President"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/education:*
uri: /process-groups/education:*
# Anyone can start an education process.
education-everybody:
groups: [everybody]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
uri: /process-instances/misc:category_number_one:process-model-with-form/*

@ -0,0 +1,12 @@
default_group: everybody
groups:
admin:
users: [admin@spiffworkflow.org]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

@ -4,57 +4,53 @@ groups:
admin:
users:
[
admin,
jakub,
kb,
alex,
dan,
mike,
jason,
j,
jarrad,
elizabeth,
jon,
natalia,
admin@spiffworkflow.org,
jakub@status.im,
jarrad@status.im,
kb@sartography.com,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]
Finance Team:
users:
[
jakub,
alex,
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
natalia,
sasha,
fin,
fin1,
jakub@status.im,
amir@status.im,
jarrad@status.im,
sasha@status.im,
fin@status.im,
fin1@status.im,
alex@sartography.com,
dan@sartography.com,
mike@sartography.com,
jason@sartography.com,
j@sartography.com,
elizabeth@sartography.com,
jon@sartography.com,
]
demo:
users:
[
core,
fin,
fin1,
harmeet,
sasha,
manuchehr,
lead,
lead1
harmeet@status.im,
sasha@status.im,
manuchehr@status.im,
core@status.im,
fin@status.im,
fin1@status.im,
lead@status.im,
lead1@status.im,
]
core-contributor:
test:
users:
[
core,
harmeet,
natalia@sartography.com,
]
permissions:
@ -67,104 +63,86 @@ permissions:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*
uri: /process-instances/*
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/user-groups/for-current-user
# read all for everybody
# open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-groups/*
uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-models/*
uri: /process-models/*
# basic perms for everybody
read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/for-me/*
manage-process-instance-reports:
uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/reports/*
uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/processes
manage-procurement-admin-instances:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement:*
manage-procurement-admin-instances-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement/*
manage-procurement-admin-instance-logs:
groups: ["Project Lead"]
uri: /processes
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:*
manage-procurement-admin-instance-logs-slash:
groups: ["Project Lead"]
uri: /service-tasks
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement/*
uri: /user-groups/for-current-user
manage-revenue-streams-instances:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-revenue-streams-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
allowed_permissions: [create]
uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-invoice-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:procurement:core-contributor-invoice-management:*
allowed_permissions: [create]
uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["core-contributor", "demo"]
groups: ["demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
manage-procurement-instance-logs:
groups: ["core-contributor", "demo"]
allowed_permissions: [create]
uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
manage-revenue-streams-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:vendor-lifecycle-management:*
uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances-for-me:
groups: ["demo"]
users: []
allowed_permissions: [read]
uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
create-test-instances:
groups: ["test"]
users: []
allowed_permissions: [create, read]
uri: /process-instances/misc:test:*

@ -2,60 +2,7 @@ default_group: everybody
groups:
admin:
users:
[
admin,
jakub,
kb,
alex,
dan,
mike,
jason,
j,
jarrad,
elizabeth,
jon,
]
Finance Team:
users:
[
jakub,
alex,
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
sasha,
fin,
fin1,
]
demo:
users:
[
core,
fin,
fin1,
harmeet,
sasha,
manuchehr,
lead,
lead1
]
core-contributor:
users:
[
core,
harmeet,
]
test:
users: [natalia]
users: [admin@spiffworkflow.org]
permissions:
admin:
@ -63,110 +10,3 @@ permissions:
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
user-groups-for-current-user:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/user-groups/for-current-user
# read all for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-models/*
read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/processes
manage-procurement-admin:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:*
manage-procurement-admin-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement/*
manage-procurement-admin-models:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement:*
manage-procurement-admin-models-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/manage-procurement/*
manage-procurement-admin-instances:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement:*
manage-procurement-admin-instances-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement/*
finance-admin:
groups: ["Finance Team"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/manage-procurement:procurement:*
manage-revenue-streams-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
create-test-instances:
groups: ["test"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:test:*

View File

@ -1,5 +1,12 @@
default_group: everybody
users:
testadmin1:
service: https://testing/openid/thing
email: testadmin1@spiffworkflow.org
password: admin
preferred_username: El administrador de la muerte
groups:
admin:
users: [testadmin1, testadmin2]
@ -14,7 +21,7 @@ permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete, list, instantiate]
allowed_permissions: [create, read, update, delete]
uri: /*
read-all:
@ -27,29 +34,29 @@ permissions:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
uri: /tasks/*
# TODO: all uris should really have the same structure
finance-admin-group:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/finance/*
uri: /process-groups/finance/*
finance-admin-model:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/finance/*
uri: /process-models/finance/*
finance-admin-model-lanes:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/finance:model_with_lanes/*
uri: /process-models/finance:model_with_lanes/*
finance-admin-instance-run:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*
uri: /process-instances/*

View File

@ -0,0 +1,11 @@
"""Qa1."""
from os import environ
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="qa2")
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
GIT_USER_EMAIL = environ.get(
"GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
)

View File

@ -1,7 +1,7 @@
"""Staging."""
from os import environ
GIT_BRANCH = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
GIT_BRANCH = environ.get("GIT_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"

View File

@ -0,0 +1,2 @@
"""Api_version."""
V1_API_PATH_PREFIX = "/v1.0"
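
A minimal sketch of how this constant is used: later in this diff, AuthorizationService.has_permission strips the prefix from incoming target URIs before matching them against permission_target.uri. Only str.removeprefix (Python 3.9+) is assumed; the example path is made up.

# Sketch only; the example target_uri is made up.
V1_API_PATH_PREFIX = "/v1.0"

target_uri = "/v1.0/process-groups/finance"
target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)
print(target_uri_normalized)  # -> /process-groups/finance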

View File

@ -27,6 +27,9 @@ class GroupModel(FlaskBpmnGroupModel):
identifier = db.Column(db.String(255))
user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
user_group_assignments_waiting = relationship( # type: ignore
"UserGroupAssignmentWaitingModel", cascade="delete"
)
users = relationship( # type: ignore
"UserModel",
viewonly=True,

View File

@ -8,7 +8,6 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.orm import RelationshipProperty
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@ -31,13 +30,16 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
db.UniqueConstraint("task_id", "process_instance_id", name="human_task_unique"),
)
actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id))
completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)
actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
# actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
form_file_name: str | None = db.Column(db.String(50))
ui_form_file_name: str | None = db.Column(db.String(50))

View File

@ -32,14 +32,6 @@ class Permission(enum.Enum):
update = "update"
delete = "delete"
# maybe read to GET process_model/process-instances instead?
list = "list"
# maybe use create instead on
# POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/*
# POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/332/run
instantiate = "instantiate" # this is something you do to a process model
class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
"""PermissionAssignmentModel."""

View File

@ -60,10 +60,15 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
process_initiator = relationship("UserModel")
active_human_tasks = relationship(
"HumanTaskModel",
primaryjoin="and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)",
) # type: ignore
human_tasks = relationship(
"HumanTaskModel",
cascade="delete",
primaryjoin="and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)",
overlaps="active_human_tasks",
) # type: ignore
message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore

View File

@ -1,13 +1,11 @@
"""Spiff_step_details."""
from dataclasses import dataclass
from typing import Optional
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import deferred
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@ -20,10 +18,13 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
# human_task_id: int = db.Column(
# ForeignKey(HumanTaskModel.id) # type: ignore
# )
spiff_step: int = db.Column(db.Integer, nullable=False)
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
completed_by_user_id: int = db.Column(db.Integer, nullable=True)
lane_assignment_id: Optional[int] = db.Column(
ForeignKey(GroupModel.id), nullable=True
)
# completed_by_user_id: int = db.Column(db.Integer, nullable=True)
# lane_assignment_id: Optional[int] = db.Column(
# ForeignKey(GroupModel.id), nullable=True
# )

View File

@ -1,22 +1,15 @@
"""User."""
from __future__ import annotations
from typing import Any
import jwt
import marshmallow
from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import Schema
from sqlalchemy.orm import relationship
from sqlalchemy.orm import validates
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.services.authentication_service import (
AuthenticationProviderTypes,
)
class UserNotFoundError(Exception):
@ -28,14 +21,15 @@ class UserModel(SpiffworkflowBaseDBModel):
__tablename__ = "user"
__table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)
id = db.Column(db.Integer, primary_key=True)
# server and service id must be unique, not username.
username = db.Column(db.String(255), nullable=False, unique=False)
uid = db.Column(db.String(50), unique=True)
service = db.Column(db.String(50), nullable=False, unique=False)
username = db.Column(
db.String(255), nullable=False, unique=True
) # should always be a unique value
service = db.Column(
db.String(255), nullable=False, unique=False
) # not 'openid' -- google, aws
service_id = db.Column(db.String(255), nullable=False, unique=False)
name = db.Column(db.String(255))
display_name = db.Column(db.String(255))
email = db.Column(db.String(255))
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
@ -49,21 +43,6 @@ class UserModel(SpiffworkflowBaseDBModel):
)
principal = relationship("PrincipalModel", uselist=False) # type: ignore
@validates("service")
def validate_service(self, key: str, value: Any) -> str:
"""Validate_service."""
try:
ap_type = getattr(AuthenticationProviderTypes, value, None)
except Exception as e:
raise ValueError(f"invalid service type: {value}") from e
if ap_type is not None:
ap_value: str = ap_type.value
return ap_value
raise ApiError(
error_code="invalid_service",
message=f"Could not validate service with value: {value}",
)
def encode_auth_token(self) -> str:
"""Generate the Auth Token.

View File

@ -0,0 +1,34 @@
"""UserGroupAssignment."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.group import GroupModel
class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel):
"""When a user is assigned to a group, but that username does not exist.
We cache it here to be applied in the event the user does log in to the system.
"""
MATCH_ALL_USERS = "*"
__tablename__ = "user_group_assignment_waiting"
__table_args__ = (
db.UniqueConstraint(
"username", "group_id", name="user_group_assignment_staged_unique"
),
)
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255), nullable=False)
group_id = db.Column(ForeignKey(GroupModel.id), nullable=False)
group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore
def is_match_all(self) -> bool:
"""Is_match_all."""
if self.username == self.MATCH_ALL_USERS:
return True
return False
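
A hedged sketch of how a waiting assignment could be consumed at login time. apply_waiting_assignments is a hypothetical helper written for illustration (the import path of the waiting model is also an assumption); it is not introduced by this diff.

# Hypothetical helper, for illustration only -- not part of this diff.
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment_waiting import (  # assumed path
    UserGroupAssignmentWaitingModel,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService


def apply_waiting_assignments(user: UserModel) -> None:
    """Turn cached (waiting) assignments for this username into real memberships."""
    waiting = UserGroupAssignmentWaitingModel.query.filter(
        UserGroupAssignmentWaitingModel.username.in_(
            [user.username, UserGroupAssignmentWaitingModel.MATCH_ALL_USERS]
        )
    ).all()
    for assignment in waiting:
        AuthorizationService.associate_user_with_group(user, assignment.group)
        if not assignment.is_match_all():
            # wildcard rows are kept so they can apply to future users as well
            db.session.delete(assignment)
    db.session.commit()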

View File

@ -141,7 +141,7 @@ def process_model_save(process_model_id: str, file_name: str) -> Union[str, Resp
@admin_blueprint.route("/process-models/<process_model_id>/run", methods=["GET"])
def process_model_run(process_model_id: str) -> Union[str, Response]:
"""Process_model_run."""
user = UserService.create_user("internal", "Mr. Test", username="Mr. Test")
user = UserService.create_user("Mr. Test", "internal", "Mr. Test")
process_instance = (
ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model_id, user

View File

@ -111,6 +111,7 @@ def token() -> dict:
"iat": time.time(),
"exp": time.time() + 86400, # Expire after a day.
"sub": user_name,
"email": user_details["email"],
"preferred_username": user_details.get("preferred_username", user_name),
},
client_secret,

View File

@ -2,7 +2,6 @@
import json
import os
import random
import re
import string
import uuid
from typing import Any
@ -32,10 +31,7 @@ from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
from sqlalchemy import asc
from sqlalchemy import desc
from sqlalchemy import func
from sqlalchemy import or_
from sqlalchemy.orm import aliased
from sqlalchemy.orm import selectinload
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
@ -79,7 +75,6 @@ from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
@ -140,7 +135,6 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
status_code=400,
)
)
response_dict: dict[str, dict[str, bool]] = {}
requests_to_check = body["requests_to_check"]
@ -163,21 +157,16 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
return make_response(jsonify({"results": response_dict}), 200)
def modify_process_model_id(process_model_id: str) -> str:
"""Modify_process_model_id."""
return process_model_id.replace("/", ":")
def un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str:
"""Un_modify_modified_process_model_id."""
return modified_process_model_identifier.replace(":", "/")
def process_group_add(body: dict) -> flask.wrappers.Response:
def process_group_create(body: dict) -> flask.wrappers.Response:
"""Add_process_group."""
process_group = ProcessGroup(**body)
ProcessModelService.add_process_group(process_group)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} added process group {process_group.id}"
)
return make_response(jsonify(process_group), 201)
@ -187,7 +176,7 @@ def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo
"""Process_group_delete."""
process_group_id = un_modify_modified_process_model_id(modified_process_group_id)
ProcessModelService().process_group_delete(process_group_id)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} deleted process group {process_group_id}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -207,7 +196,7 @@ def process_group_update(
process_group_id = un_modify_modified_process_model_id(modified_process_group_id)
process_group = ProcessGroup(id=process_group_id, **body_filtered)
ProcessModelService.update_process_group(process_group)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} updated process group {process_group_id}"
)
return make_response(jsonify(process_group), 200)
@ -274,7 +263,7 @@ def process_group_move(
new_process_group = ProcessModelService().process_group_move(
original_process_group_id, new_location
)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
)
return make_response(jsonify(new_process_group), 200)
@ -325,7 +314,7 @@ def process_model_create(
)
ProcessModelService.add_process_model(process_model_info)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} created process model {process_model_info.id}"
)
return Response(
@ -341,7 +330,7 @@ def process_model_delete(
"""Process_model_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
ProcessModelService().process_model_delete(process_model_identifier)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} deleted process model {process_model_identifier}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -367,7 +356,7 @@ def process_model_update(
process_model = get_process_model(process_model_identifier)
ProcessModelService.update_process_model(process_model, body_filtered)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} updated process model {process_model_identifier}"
)
return ProcessModelInfoSchema().dump(process_model)
@ -401,7 +390,7 @@ def process_model_move(
new_process_model = ProcessModelService().process_model_move(
original_process_model_id, new_location
)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
)
return make_response(jsonify(new_process_model), 200)
@ -500,7 +489,7 @@ def process_model_file_update(
)
SpecFileService.update_file(process_model, file_name, request_file_contents)
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}"
)
@ -524,7 +513,7 @@ def process_model_file_delete(
)
) from exception
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -548,7 +537,7 @@ def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response:
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
commit_and_push_to_git(
_commit_and_push_to_git(
f"User: {g.user.username} added process model file {process_model_identifier}/{file.name}"
)
return Response(
@ -595,7 +584,7 @@ def process_instance_run(
if do_engine_steps:
try:
processor.do_engine_steps()
processor.do_engine_steps(save=True)
except ApiError as e:
ErrorHandlingService().handle_error(processor, e)
raise e
@ -608,7 +597,6 @@ def process_instance_run(
status_code=400,
task=task,
) from e
processor.save()
if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
MessageService.process_message_instances()
@ -860,7 +848,7 @@ def process_instance_list_for_me(
user_filter: Optional[bool] = False,
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
group_identifier: Optional[str] = None,
user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list_for_me."""
return process_instance_list(
@ -875,7 +863,7 @@ def process_instance_list_for_me(
user_filter=user_filter,
report_identifier=report_identifier,
report_id=report_id,
group_identifier=group_identifier,
user_group_identifier=user_group_identifier,
with_relation_to_me=True,
)
@ -889,272 +877,51 @@ def process_instance_list(
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
initiated_by_me: Optional[bool] = None,
with_tasks_completed_by_me: Optional[bool] = None,
with_tasks_completed_by_my_group: Optional[bool] = None,
with_relation_to_me: Optional[bool] = None,
user_filter: Optional[bool] = False,
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
group_identifier: Optional[str] = None,
user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list."""
process_instance_report = ProcessInstanceReportService.report_with_identifier(
g.user, report_id, report_identifier
)
print(f"with_relation_to_me: {with_relation_to_me}")
if user_filter:
report_filter = ProcessInstanceReportFilter(
process_model_identifier,
start_from,
start_to,
end_from,
end_to,
process_status.split(",") if process_status else None,
initiated_by_me,
with_tasks_completed_by_me,
with_tasks_completed_by_my_group,
with_relation_to_me,
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
with_relation_to_me=with_relation_to_me,
process_status=process_status.split(",") if process_status else None,
)
else:
report_filter = (
ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report,
process_model_identifier,
start_from,
start_to,
end_from,
end_to,
process_status,
initiated_by_me,
with_tasks_completed_by_me,
with_tasks_completed_by_my_group,
with_relation_to_me,
process_instance_report=process_instance_report,
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
with_relation_to_me=with_relation_to_me,
)
)
process_instance_query = ProcessInstanceModel.query
# Always join that hot user table for good performance at serialization time.
process_instance_query = process_instance_query.options(
selectinload(ProcessInstanceModel.process_initiator)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
page=page,
per_page=per_page,
user=g.user,
)
if report_filter.process_model_identifier is not None:
process_model = get_process_model(
f"{report_filter.process_model_identifier}",
)
process_instance_query = process_instance_query.filter_by(
process_model_identifier=process_model.id
)
# this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
if (
ProcessInstanceModel.start_in_seconds is None
or ProcessInstanceModel.end_in_seconds is None
):
raise (
ApiError(
error_code="unexpected_condition",
message="Something went very wrong",
status_code=500,
)
)
if report_filter.start_from is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.start_in_seconds >= report_filter.start_from
)
if report_filter.start_to is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.start_in_seconds <= report_filter.start_to
)
if report_filter.end_from is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.end_in_seconds >= report_filter.end_from
)
if report_filter.end_to is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.end_in_seconds <= report_filter.end_to
)
if report_filter.process_status is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore
)
if report_filter.initiated_by_me is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore
)
process_instance_query = process_instance_query.filter_by(
process_initiator=g.user
)
if report_filter.with_relation_to_me is True:
process_instance_query = process_instance_query.outerjoin(
HumanTaskModel
).outerjoin(
HumanTaskUserModel,
and_(
HumanTaskModel.id == HumanTaskUserModel.human_task_id,
HumanTaskUserModel.user_id == g.user.id,
),
)
process_instance_query = process_instance_query.filter(
or_(
HumanTaskUserModel.id.is_not(None),
ProcessInstanceModel.process_initiator_id == g.user.id,
)
)
# TODO: not sure if this is exactly what is wanted
if report_filter.with_tasks_completed_by_me is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore
)
# process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
# process_instance_query = process_instance_query.add_columns(UserModel.username)
# search for process_instance.UserModel.username in this file for more details about why adding columns is annoying.
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.process_initiator_id != g.user.id
)
process_instance_query = process_instance_query.join(
SpiffStepDetailsModel,
ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id,
)
process_instance_query = process_instance_query.join(
SpiffLoggingModel,
ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id,
)
process_instance_query = process_instance_query.filter(
SpiffLoggingModel.message.contains("COMPLETED") # type: ignore
)
process_instance_query = process_instance_query.filter(
SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step
)
process_instance_query = process_instance_query.filter(
SpiffStepDetailsModel.completed_by_user_id == g.user.id
)
if report_filter.with_tasks_completed_by_my_group is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore
)
process_instance_query = process_instance_query.join(
SpiffStepDetailsModel,
ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id,
)
process_instance_query = process_instance_query.join(
SpiffLoggingModel,
ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id,
)
process_instance_query = process_instance_query.filter(
SpiffLoggingModel.message.contains("COMPLETED") # type: ignore
)
process_instance_query = process_instance_query.filter(
SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step
)
if group_identifier:
process_instance_query = process_instance_query.join(
GroupModel,
GroupModel.identifier == group_identifier,
)
else:
process_instance_query = process_instance_query.join(
GroupModel,
GroupModel.id == SpiffStepDetailsModel.lane_assignment_id,
)
process_instance_query = process_instance_query.join(
UserGroupAssignmentModel,
UserGroupAssignmentModel.group_id == GroupModel.id,
)
process_instance_query = process_instance_query.filter(
UserGroupAssignmentModel.user_id == g.user.id
)
instance_metadata_aliases = {}
stock_columns = ProcessInstanceReportService.get_column_names_for_model(
ProcessInstanceModel
)
for column in process_instance_report.report_metadata["columns"]:
if column["accessor"] in stock_columns:
continue
instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
instance_metadata_aliases[column["accessor"]] = instance_metadata_alias
filter_for_column = None
if "filter_by" in process_instance_report.report_metadata:
filter_for_column = next(
(
f
for f in process_instance_report.report_metadata["filter_by"]
if f["field_name"] == column["accessor"]
),
None,
)
isouter = True
conditions = [
ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
instance_metadata_alias.key == column["accessor"],
]
if filter_for_column:
isouter = False
conditions.append(
instance_metadata_alias.value == filter_for_column["field_value"]
)
process_instance_query = process_instance_query.join(
instance_metadata_alias, and_(*conditions), isouter=isouter
).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"]))
order_by_query_array = []
order_by_array = process_instance_report.report_metadata["order_by"]
if len(order_by_array) < 1:
order_by_array = ProcessInstanceReportModel.default_order_by()
for order_by_option in order_by_array:
attribute = re.sub("^-", "", order_by_option)
if attribute in stock_columns:
if order_by_option.startswith("-"):
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).desc()
)
else:
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).asc()
)
elif attribute in instance_metadata_aliases:
if order_by_option.startswith("-"):
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).desc()
)
else:
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).asc()
)
process_instances = (
process_instance_query.group_by(ProcessInstanceModel.id)
.add_columns(ProcessInstanceModel.id)
.order_by(*order_by_query_array)
.paginate(page=page, per_page=per_page, error_out=False)
)
results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(
process_instances.items, process_instance_report.report_metadata["columns"]
)
response_json = {
"report": process_instance_report,
"results": results,
"filters": report_filter.to_dict(),
"pagination": {
"count": len(results),
"total": process_instances.total,
"pages": process_instances.pages,
},
}
return make_response(jsonify(response_json), 200)
@ -1470,11 +1237,11 @@ def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Respo
def task_list_for_my_groups(
group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Task_list_for_my_groups."""
return get_tasks(
group_identifier=group_identifier,
user_group_identifier=user_group_identifier,
processes_started_by_user=False,
page=page,
per_page=per_page,
@ -1494,7 +1261,7 @@ def get_tasks(
has_lane_assignment_id: bool = True,
page: int = 1,
per_page: int = 100,
group_identifier: Optional[str] = None,
user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Get_tasks."""
user_id = g.user.id
@ -1532,9 +1299,9 @@ def get_tasks(
),
)
if has_lane_assignment_id:
if group_identifier:
if user_group_identifier:
human_tasks_query = human_tasks_query.filter(
GroupModel.identifier == group_identifier
GroupModel.identifier == user_group_identifier
)
else:
human_tasks_query = human_tasks_query.filter(
@ -1550,7 +1317,7 @@ def get_tasks(
ProcessInstanceModel.updated_at_in_seconds,
ProcessInstanceModel.created_at_in_seconds,
UserModel.username,
GroupModel.identifier.label("group_identifier"),
GroupModel.identifier.label("user_group_identifier"),
HumanTaskModel.task_name,
HumanTaskModel.task_title,
HumanTaskModel.process_model_display_name,
@ -1580,7 +1347,6 @@ def process_instance_task_list_without_task_data_for_me(
) -> flask.wrappers.Response:
"""Process_instance_task_list_without_task_data_for_me."""
process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
print(f"process_instance: {process_instance}")
return process_instance_task_list(
modified_process_model_identifier,
process_instance,
@ -1768,6 +1534,30 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
return make_response(jsonify(task), 200)
def process_data_show(
process_instance_id: int,
process_data_identifier: str,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_data_show."""
process_instance = find_process_instance_by_id_or_raise(process_instance_id)
processor = ProcessInstanceProcessor(process_instance)
all_process_data = processor.get_data()
process_data_value = None
if process_data_identifier in all_process_data:
process_data_value = all_process_data[process_data_identifier]
return make_response(
jsonify(
{
"process_data_identifier": process_data_identifier,
"process_data_value": process_data_value,
}
),
200,
)
def task_submit(
process_instance_id: int,
task_id: str,
@ -1897,7 +1687,7 @@ def script_unit_test_create(
extension_elements = None
extension_elements_array = script_task_element.xpath(
"//bpmn:extensionElements",
".//bpmn:extensionElements",
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
)
if len(extension_elements_array) == 0:
@ -2135,7 +1925,7 @@ def secret_list(
return make_response(jsonify(response_json), 200)
def add_secret(body: Dict) -> Response:
def secret_create(body: Dict) -> Response:
"""Add secret."""
secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
return Response(
@ -2145,19 +1935,70 @@ def add_secret(body: Dict) -> Response:
)
def update_secret(key: str, body: dict) -> Response:
def secret_update(key: str, body: dict) -> Response:
"""Update secret."""
SecretService().update_secret(key, body["value"], g.user.id)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def delete_secret(key: str) -> Response:
def secret_delete(key: str) -> Response:
"""Delete secret."""
current_user = UserService.current_user()
SecretService.delete_secret(key, current_user.id)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def task_data_update(
process_instance_id: str,
modified_process_model_identifier: str,
task_id: str,
body: Dict,
) -> Response:
"""Update task data."""
process_instance = ProcessInstanceModel.query.filter(
ProcessInstanceModel.id == int(process_instance_id)
).first()
if process_instance:
if process_instance.status != "suspended":
raise ProcessInstanceTaskDataCannotBeUpdatedError(
f"The process instance needs to be suspended to udpate the task-data. It is currently: {process_instance.status}"
)
process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json)
if "new_task_data" in body:
new_task_data_str: str = body["new_task_data"]
new_task_data_dict = json.loads(new_task_data_str)
if task_id in process_instance_bpmn_json_dict["tasks"]:
process_instance_bpmn_json_dict["tasks"][task_id][
"data"
] = new_task_data_dict
process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict)
db.session.add(process_instance)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
raise ApiError(
error_code="update_task_data_error",
message=f"Could not update the Instance. Original error is {e}",
) from e
else:
raise ApiError(
error_code="update_task_data_error",
message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
)
else:
raise ApiError(
error_code="update_task_data_error",
message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.",
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)
def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any:
"""Get_required_parameter_or_raise."""
return_value = None
@ -2334,7 +2175,7 @@ def mark_task_complete(
)
def commit_and_push_to_git(message: str) -> None:
def _commit_and_push_to_git(message: str) -> None:
"""Commit_and_push_to_git."""
if current_app.config["GIT_COMMIT_ON_SAVE"]:
git_output = GitService.commit(message=message)
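
A hedged example of the request body the new task_data_update handler above expects: the process instance has to be in the "suspended" state, and "new_task_data" is a JSON-encoded string which the handler loads and writes into bpmn_json["tasks"][task_id]["data"]. The task-data values below are made up.

# Illustrative request body for task_data_update; the data values are made up.
import json

body = {"new_task_data": json.dumps({"approver": "fin1", "invoice_total": 250})}

Sending the task data as an encoded string presumably keeps the endpoint agnostic about its shape, since the handler only re-serializes it into the stored bpmn_json.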

View File

@ -76,7 +76,7 @@ def verify_token(
except ApiError as ae:  # API Error is only thrown if the token is outdated.
# Try to refresh the token
user = UserService.get_user_by_service_and_service_id(
"open_id", decoded_token["sub"]
decoded_token["iss"], decoded_token["sub"]
)
if user:
refresh_token = AuthenticationService.get_refresh_token(user.id)
@ -105,10 +105,12 @@ def verify_token(
) from e
if (
user_info is not None and "error" not in user_info
user_info is not None
and "error" not in user_info
and "iss" in user_info
): # not sure what to test yet
user_model = (
UserModel.query.filter(UserModel.service == "open_id")
UserModel.query.filter(UserModel.service == user_info["iss"])
.filter(UserModel.service_id == user_info["sub"])
.first()
)
@ -293,7 +295,6 @@ def get_decoded_token(token: str) -> Optional[Dict]:
try:
decoded_token = jwt.decode(token, options={"verify_signature": False})
except Exception as e:
print(f"Exception in get_token_type: {e}")
raise ApiError(
error_code="invalid_token", message="Cannot decode token."
) from e
@ -341,9 +342,5 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
)
if user:
return user
user = UserModel(
username=service_id,
service=service,
service_id=service_id,
)
user = UserService.create_user(service_id, service, service_id)
return user

View File

@ -1,43 +0,0 @@
"""Get_env."""
from typing import Any
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.group import GroupNotFoundError
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.user_service import UserService
class AddUserToGroup(Script):
"""AddUserToGroup."""
def get_description(self) -> str:
"""Get_description."""
return """Add a given user to a given group."""
def run(
self,
script_attributes_context: ScriptAttributesContext,
*args: Any,
**kwargs: Any,
) -> Any:
"""Run."""
username = args[0]
group_identifier = args[1]
user = UserModel.query.filter_by(username=username).first()
if user is None:
raise UserNotFoundError(
f"Script 'add_user_to_group' could not find a user with username: {username}"
)
group = GroupModel.query.filter_by(identifier=group_identifier).first()
if group is None:
raise GroupNotFoundError(
f"Script 'add_user_to_group' could not find group with identifier '{group_identifier}'."
)
UserService.add_user_to_group(user, group)

View File

@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class FactService(Script):
"""FactService."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Just your basic class that can pull in data from a few api endpoints and

View File

@ -0,0 +1,71 @@
"""Get_env."""
from collections import OrderedDict
from typing import Any
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.scripts.script import Script
class GetAllPermissions(Script):
"""GetAllPermissions."""
def get_description(self) -> str:
"""Get_description."""
return """Get all permissions currently in the system."""
def run(
self,
script_attributes_context: ScriptAttributesContext,
*args: Any,
**kwargs: Any,
) -> Any:
"""Run."""
permission_assignments = (
PermissionAssignmentModel.query.join(
PrincipalModel,
PrincipalModel.id == PermissionAssignmentModel.principal_id,
)
.join(GroupModel, GroupModel.id == PrincipalModel.group_id)
.join(
PermissionTargetModel,
PermissionTargetModel.id
== PermissionAssignmentModel.permission_target_id,
)
.add_columns(
PermissionAssignmentModel.permission,
PermissionTargetModel.uri,
GroupModel.identifier.label("group_identifier"),
)
)
permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict()
for pa in permission_assignments:
permissions.setdefault((pa.group_identifier, pa.uri), []).append(
pa.permission
)
def replace_suffix(string: str, old: str, new: str) -> str:
"""Replace_suffix."""
if string.endswith(old):
return string[: -len(old)] + new
return string
# sort list of strings based on a specific order
def sort_by_order(string_list: list, order: list) -> list:
"""Sort_by_order."""
return sorted(string_list, key=lambda x: order.index(x))
return [
{
"group_identifier": k[0],
"uri": replace_suffix(k[1], "%", "*"),
"permissions": sort_by_order(v, ["create", "read", "update", "delete"]),
}
for k, v in permissions.items()
]
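
A hedged illustration of the shape this script returns: run() groups permission assignments by (group, uri), restores the trailing "*" that was stored as "%", and orders the actions create/read/update/delete. The concrete groups and URIs below are made up.

# Illustrative return value of get_all_permissions(); the values are made up.
example_result = [
    {
        "group_identifier": "Finance Team",
        "uri": "/process-groups/finance/*",
        "permissions": ["create", "read", "update", "delete"],
    },
    {
        "group_identifier": "everybody",
        "uri": "/processes",
        "permissions": ["read"],
    },
]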

View File

@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetCurrentUser(Script):
"""GetCurrentUser."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Return the current user."""

View File

@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetEnv(Script):
"""GetEnv."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Returns the current environment - ie testing, staging, production."""

View File

@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetFrontendUrl(Script):
"""GetFrontendUrl."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Return the url to the frontend."""

View File

@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetGroupMembers(Script):
"""GetGroupMembers."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Return the list of usernames of the users in the given group."""

View File

@ -14,6 +14,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetLocaltime(Script):
"""GetLocaltime."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Converts a Datetime object into a Datetime object for a specific timezone.

View File

@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetProcessInfo(Script):
"""GetProcessInfo."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Returns a dictionary of information about the currently running process."""

View File

@ -0,0 +1,39 @@
"""Get_env."""
from typing import Any
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.authorization_service import AuthorizationService
class RefreshPermissions(Script):
"""RefreshPermissions."""
def get_description(self) -> str:
"""Get_description."""
return """Add permissions using a dict.
group_info: [
{
'name': group_identifier,
'users': array_of_users,
'permissions': [
{
'actions': array_of_actions - create, read, etc,
'uri': target_uri
}
]
}
]
"""
def run(
self,
script_attributes_context: ScriptAttributesContext,
*args: Any,
**kwargs: Any,
) -> Any:
"""Run."""
group_info = args[0]
AuthorizationService.refresh_permissions(group_info)
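
A hedged example of the group_info argument this script forwards to AuthorizationService.refresh_permissions. The structure follows the docstring above; the group, users, and URIs are made up.

# Illustrative group_info payload for refresh_permissions(); the values are made up.
group_info = [
    {
        "name": "Finance Team",
        "users": ["fin", "fin1"],
        "permissions": [
            {"actions": ["create", "read"], "uri": "/process-instances/finance:*"},
            {"actions": ["read"], "uri": "/logs/finance:*"},
        ],
    }
]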

View File

@ -10,9 +10,12 @@ from typing import Callable
from flask_bpmn.api.api_error import ApiError
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceNotFoundError
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService
# Generally speaking, having some global in a flask app is TERRIBLE.
# This is here, because after loading the application this will never change under
@ -20,6 +23,10 @@ from spiffworkflow_backend.models.script_attributes_context import (
SCRIPT_SUB_CLASSES = None
class ScriptUnauthorizedForUserError(Exception):
"""ScriptUnauthorizedForUserError."""
class Script:
"""Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks."""
@ -43,6 +50,15 @@ class Script:
+ "does not properly implement the run function.",
)
@staticmethod
def requires_privileged_permissions() -> bool:
"""It seems safer to default to True and make safe functions opt in for any user to run them.
To give access to script for a given user, add a 'create' permission with following target-uri:
'/can-run-privileged-script/{script_name}'
"""
return True
@staticmethod
def generate_augmented_list(
script_attributes_context: ScriptAttributesContext,
@ -71,18 +87,50 @@ class Script:
that we created.
"""
instance = subclass()
return lambda *ar, **kw: subclass.run(
instance,
script_attributes_context,
*ar,
**kw,
)
def check_script_permission() -> None:
"""Check_script_permission."""
if subclass.requires_privileged_permissions():
script_function_name = get_script_function_name(subclass)
uri = f"/can-run-privileged-script/{script_function_name}"
process_instance = ProcessInstanceModel.query.filter_by(
id=script_attributes_context.process_instance_id
).first()
if process_instance is None:
raise ProcessInstanceNotFoundError(
f"Could not find a process instance with id '{script_attributes_context.process_instance_id}' "
f"when running script '{script_function_name}'"
)
user = process_instance.process_initiator
has_permission = AuthorizationService.user_has_permission(
user=user, permission="create", target_uri=uri
)
if not has_permission:
raise ScriptUnauthorizedForUserError(
f"User {user.username} does not have access to run privileged script '{script_function_name}'"
)
def run_script_if_allowed(*ar: Any, **kw: Any) -> Any:
"""Run_script_if_allowed."""
check_script_permission()
return subclass.run(
instance,
script_attributes_context,
*ar,
**kw,
)
return run_script_if_allowed
def get_script_function_name(subclass: type[Script]) -> str:
"""Get_script_function_name."""
return subclass.__module__.split(".")[-1]
execlist = {}
subclasses = Script.get_all_subclasses()
for x in range(len(subclasses)):
subclass = subclasses[x]
execlist[subclass.__module__.split(".")[-1]] = make_closure(
execlist[get_script_function_name(subclass)] = make_closure(
subclass, script_attributes_context=script_attributes_context
)
return execlist
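
A hedged illustration of how the privileged-script permission target is derived, following get_script_function_name and check_script_permission above; the module path is an example. Granting a group "create" on that URI is what lets its members run the privileged script, per the requires_privileged_permissions docstring.

# Illustration only; the module path is an example.
module = "spiffworkflow_backend.scripts.refresh_permissions"
script_function_name = module.split(".")[-1]  # "refresh_permissions"
uri = f"/can-run-privileged-script/{script_function_name}"
print(uri)  # -> /can-run-privileged-script/refresh_permissions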

View File

@ -93,7 +93,7 @@ class AuthenticationService:
+ f"?state={state}&"
+ "response_type=code&"
+ f"client_id={self.client_id()}&"
+ "scope=openid&"
+ "scope=openid profile email&"
+ f"redirect_uri={return_redirect_url}"
)
return login_redirect_url

View File

@ -1,10 +1,14 @@
"""Authorization_service."""
import inspect
import re
from dataclasses import dataclass
from hashlib import sha256
from hmac import compare_digest
from hmac import HMAC
from typing import Any
from typing import Optional
from typing import Set
from typing import TypedDict
from typing import Union
import jwt
@ -19,6 +23,7 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from sqlalchemy import or_
from sqlalchemy import text
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
@ -45,6 +50,40 @@ class UserDoesNotHaveAccessToTaskError(Exception):
"""UserDoesNotHaveAccessToTaskError."""
class InvalidPermissionError(Exception):
"""InvalidPermissionError."""
@dataclass
class PermissionToAssign:
"""PermissionToAssign."""
permission: str
target_uri: str
# the relevant permissions correspond to the API methods that are currently available for each path prefix.
# if we add further API methods, we'll need to evaluate whether they should be added here.
PATH_SEGMENTS_FOR_PERMISSION_ALL = [
{"path": "/logs", "relevant_permissions": ["read"]},
{
"path": "/process-instances",
"relevant_permissions": ["create", "read", "delete"],
},
{"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
{"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
{"path": "/task-data", "relevant_permissions": ["read", "update"]},
{"path": "/process-data", "relevant_permissions": ["read"]},
]
class DesiredPermissionDict(TypedDict):
"""DesiredPermissionDict."""
group_identifiers: Set[str]
permission_assignments: list[PermissionAssignmentModel]
class AuthorizationService:
"""Determine whether a user has permission to perform their request."""
@ -75,6 +114,7 @@ class AuthorizationService:
) -> bool:
"""Has_permission."""
principal_ids = [p.id for p in principals]
target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)
permission_assignments = (
PermissionAssignmentModel.query.filter(
@ -84,10 +124,12 @@ class AuthorizationService:
.join(PermissionTargetModel)
.filter(
or_(
text(f"'{target_uri}' LIKE permission_target.uri"),
text(f"'{target_uri_normalized}' LIKE permission_target.uri"),
# to check for exact matches as well
# see test_user_can_access_base_path_when_given_wildcard_permission unit test
text(f"'{target_uri}' = replace(permission_target.uri, '/%', '')"),
text(
f"'{target_uri_normalized}' = replace(replace(permission_target.uri, '/%', ''), ':%', '')"
),
)
)
.all()
@ -127,17 +169,15 @@ class AuthorizationService:
return cls.has_permission(principals, permission, target_uri)
@classmethod
def delete_all_permissions_and_recreate(cls) -> None:
"""Delete_all_permissions_and_recreate."""
def delete_all_permissions(cls) -> None:
"""Delete_all_permissions_and_recreate. EXCEPT For permissions for the current user?"""
for model in [PermissionAssignmentModel, PermissionTargetModel]:
db.session.query(model).delete()
# cascading to principals doesn't seem to work when attempting to delete all so do it like this instead
for group in GroupModel.query.all():
db.session.delete(group)
db.session.commit()
cls.import_permissions_from_yaml_file()
@classmethod
def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None:
@ -155,7 +195,7 @@ class AuthorizationService:
@classmethod
def import_permissions_from_yaml_file(
cls, raise_if_missing_user: bool = False
) -> None:
) -> DesiredPermissionDict:
"""Import_permissions_from_yaml_file."""
if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
raise (
@ -169,13 +209,16 @@ class AuthorizationService:
permission_configs = yaml.safe_load(file)
default_group = None
unique_user_group_identifiers: Set[str] = set()
if "default_group" in permission_configs:
default_group_identifier = permission_configs["default_group"]
default_group = GroupService.find_or_create_group(default_group_identifier)
unique_user_group_identifiers.add(default_group_identifier)
if "groups" in permission_configs:
for group_identifier, group_config in permission_configs["groups"].items():
group = GroupService.find_or_create_group(group_identifier)
unique_user_group_identifiers.add(group_identifier)
for username in group_config["users"]:
user = UserModel.query.filter_by(username=username).first()
if user is None:
@ -188,26 +231,25 @@ class AuthorizationService:
continue
cls.associate_user_with_group(user, group)
permission_assignments = []
if "permissions" in permission_configs:
for _permission_identifier, permission_config in permission_configs[
"permissions"
].items():
uri = permission_config["uri"]
uri_with_percent = re.sub(r"\*", "%", uri)
permission_target = PermissionTargetModel.query.filter_by(
uri=uri_with_percent
).first()
if permission_target is None:
permission_target = PermissionTargetModel(uri=uri_with_percent)
db.session.add(permission_target)
db.session.commit()
permission_target = cls.find_or_create_permission_target(uri)
for allowed_permission in permission_config["allowed_permissions"]:
if "groups" in permission_config:
for group_identifier in permission_config["groups"]:
group = GroupService.find_or_create_group(group_identifier)
cls.create_permission_for_principal(
group.principal, permission_target, allowed_permission
unique_user_group_identifiers.add(group_identifier)
permission_assignments.append(
cls.create_permission_for_principal(
group.principal,
permission_target,
allowed_permission,
)
)
if "users" in permission_config:
for username in permission_config["users"]:
@ -218,14 +260,35 @@ class AuthorizationService:
.filter(UserModel.username == username)
.first()
)
cls.create_permission_for_principal(
principal, permission_target, allowed_permission
permission_assignments.append(
cls.create_permission_for_principal(
principal, permission_target, allowed_permission
)
)
if default_group is not None:
for user in UserModel.query.all():
cls.associate_user_with_group(user, default_group)
return {
"group_identifiers": unique_user_group_identifiers,
"permission_assignments": permission_assignments,
}
@classmethod
def find_or_create_permission_target(cls, uri: str) -> PermissionTargetModel:
"""Find_or_create_permission_target."""
uri_with_percent = re.sub(r"\*", "%", uri)
target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
permission_target: Optional[
PermissionTargetModel
] = PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
if permission_target is None:
permission_target = PermissionTargetModel(uri=target_uri_normalized)
db.session.add(permission_target)
db.session.commit()
return permission_target
@classmethod
def create_permission_for_principal(
cls,
@ -449,33 +512,48 @@ class AuthorizationService:
@classmethod
def create_user_from_sign_in(cls, user_info: dict) -> UserModel:
"""Create_user_from_sign_in."""
"""Name, family_name, given_name, middle_name, nickname, preferred_username,"""
"""Profile, picture, website, gender, birthdate, zoneinfo, locale, and updated_at. """
"""Email."""
is_new_user = False
user_model = (
UserModel.query.filter(UserModel.service == "open_id")
UserModel.query.filter(UserModel.service == user_info["iss"])
.filter(UserModel.service_id == user_info["sub"])
.first()
)
email = display_name = username = ""
if "email" in user_info:
username = user_info["email"]
email = user_info["email"]
else: # we fall back to the sub, which may be very ugly.
username = user_info["sub"] + "@" + user_info["iss"]
if "preferred_username" in user_info:
display_name = user_info["preferred_username"]
elif "nickname" in user_info:
display_name = user_info["nickname"]
elif "name" in user_info:
display_name = user_info["name"]
if user_model is None:
current_app.logger.debug("create_user in login_return")
is_new_user = True
name = username = email = ""
if "name" in user_info:
name = user_info["name"]
if "username" in user_info:
username = user_info["username"]
elif "preferred_username" in user_info:
username = user_info["preferred_username"]
if "email" in user_info:
email = user_info["email"]
user_model = UserService().create_user(
service="open_id",
service_id=user_info["sub"],
name=name,
username=username,
service=user_info["iss"],
service_id=user_info["sub"],
email=email,
display_name=display_name,
)
else:
# Update with the latest information
user_model.username = username
user_model.email = email
user_model.display_name = display_name
user_model.service = user_info["iss"]
user_model.service_id = user_info["sub"]
# this may eventually get too slow.
# when it does, be careful about backgrounding, because
# the user will immediately need permissions to use the site.
@ -490,6 +568,223 @@ class AuthorizationService:
# this cannot be None so ignore mypy
return user_model # type: ignore
@classmethod
def get_permissions_to_assign(
cls,
permission_set: str,
process_related_path_segment: str,
target_uris: list[str],
) -> list[PermissionToAssign]:
"""Get_permissions_to_assign."""
permissions = permission_set.split(",")
if permission_set == "all":
permissions = ["create", "read", "update", "delete"]
permissions_to_assign: list[PermissionToAssign] = []
# we were thinking that if you can start an instance, you ought to be able to view your own instances.
if permission_set == "start":
target_uri = f"/process-instances/{process_related_path_segment}"
permissions_to_assign.append(
PermissionToAssign(permission="create", target_uri=target_uri)
)
target_uri = f"/process-instances/for-me/{process_related_path_segment}"
permissions_to_assign.append(
PermissionToAssign(permission="read", target_uri=target_uri)
)
else:
if permission_set == "all":
for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL:
target_uri = (
f"{path_segment_dict['path']}/{process_related_path_segment}"
)
relevant_permissions = path_segment_dict["relevant_permissions"]
for permission in relevant_permissions:
permissions_to_assign.append(
PermissionToAssign(
permission=permission, target_uri=target_uri
)
)
for target_uri in target_uris:
for permission in permissions:
permissions_to_assign.append(
PermissionToAssign(permission=permission, target_uri=target_uri)
)
return permissions_to_assign
@classmethod
def explode_permissions(
cls, permission_set: str, target: str
) -> list[PermissionToAssign]:
"""Explodes given permissions to and returns list of PermissionToAssign objects.
These can be used to then iterate through and inserted into the database.
Target Macros:
ALL
* gives access to ALL api endpoints - useful to give admin-like permissions
PG:[process_group_identifier]
* affects given process-group and all sub process-groups and process-models
PM:[process_model_identifier]
* affects given process-model
BASIC
* Basic access to complete tasks and use the site
Permission Macros:
all
* create, read, update, delete
start
* create process-instances (aka instantiate or start a process-model)
* only works with PG and PM target macros
"""
permissions_to_assign: list[PermissionToAssign] = []
permissions = permission_set.split(",")
if permission_set == "all":
permissions = ["create", "read", "update", "delete"]
if target.startswith("PG:"):
process_group_identifier = (
target.removeprefix("PG:").replace("/", ":").removeprefix(":")
)
process_related_path_segment = f"{process_group_identifier}:*"
if process_group_identifier == "ALL":
process_related_path_segment = "*"
target_uris = [
f"/process-groups/{process_related_path_segment}",
f"/process-models/{process_related_path_segment}",
]
permissions_to_assign = (
permissions_to_assign
+ cls.get_permissions_to_assign(
permission_set, process_related_path_segment, target_uris
)
)
elif target.startswith("PM:"):
process_model_identifier = (
target.removeprefix("PM:").replace("/", ":").removeprefix(":")
)
process_related_path_segment = f"{process_model_identifier}/*"
if process_model_identifier == "ALL":
process_related_path_segment = "*"
target_uris = [f"/process-models/{process_related_path_segment}"]
permissions_to_assign = (
permissions_to_assign
+ cls.get_permissions_to_assign(
permission_set, process_related_path_segment, target_uris
)
)
elif permission_set == "start":
raise InvalidPermissionError(
"Permission 'start' is only available for macros PM and PG."
)
elif target.startswith("BASIC"):
permissions_to_assign.append(
PermissionToAssign(
permission="read", target_uri="/process-instances/for-me"
)
)
permissions_to_assign.append(
PermissionToAssign(permission="read", target_uri="/processes")
)
permissions_to_assign.append(
PermissionToAssign(permission="read", target_uri="/service-tasks")
)
permissions_to_assign.append(
PermissionToAssign(
permission="read", target_uri="/user-groups/for-current-user"
)
)
for permission in ["create", "read", "update", "delete"]:
permissions_to_assign.append(
PermissionToAssign(
permission=permission, target_uri="/process-instances/reports/*"
)
)
permissions_to_assign.append(
PermissionToAssign(permission=permission, target_uri="/tasks/*")
)
elif target == "ALL":
for permission in permissions:
permissions_to_assign.append(
PermissionToAssign(permission=permission, target_uri="/*")
)
elif target.startswith("/"):
for permission in permissions:
permissions_to_assign.append(
PermissionToAssign(permission=permission, target_uri=target)
)
else:
raise InvalidPermissionError(
f"Target uri '{target}' with permission set '{permission_set}' is invalid. "
f"The target uri must either be a macro of PG, PM, BASIC, or ALL or an api uri."
)
return permissions_to_assign
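A quick sketch of how a macro explodes, matching the expectations asserted in the tests further down in this diff:

    permissions = AuthorizationService.explode_permissions(
        "start", "PG:/some-process-group/some-process-model"
    )
    sorted((p.target_uri, p.permission) for p in permissions)
    # [('/process-instances/for-me/some-process-group:some-process-model:*', 'read'),
    #  ('/process-instances/some-process-group:some-process-model:*', 'create')]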
@classmethod
def add_permission_from_uri_or_macro(
cls, group_identifier: str, permission: str, target: str
) -> list[PermissionAssignmentModel]:
"""Add_permission_from_uri_or_macro."""
group = GroupService.find_or_create_group(group_identifier)
permissions_to_assign = cls.explode_permissions(permission, target)
permission_assignments = []
for permission_to_assign in permissions_to_assign:
permission_target = cls.find_or_create_permission_target(
permission_to_assign.target_uri
)
permission_assignments.append(
cls.create_permission_for_principal(
group.principal, permission_target, permission_to_assign.permission
)
)
return permission_assignments
@classmethod
def refresh_permissions(cls, group_info: list[dict[str, Any]]) -> None:
"""Adds new permission assignments and deletes old ones."""
initial_permission_assignments = PermissionAssignmentModel.query.all()
result = cls.import_permissions_from_yaml_file()
desired_permission_assignments = result["permission_assignments"]
desired_group_identifiers = result["group_identifiers"]
for group in group_info:
group_identifier = group["name"]
for username in group["users"]:
GroupService.add_user_to_group_or_add_to_waiting(
username, group_identifier
)
desired_group_identifiers.add(group_identifier)
for permission in group["permissions"]:
for crud_op in permission["actions"]:
desired_permission_assignments.extend(
cls.add_permission_from_uri_or_macro(
group_identifier=group_identifier,
target=permission["uri"],
permission=crud_op,
)
)
desired_group_identifiers.add(group_identifier)
for ipa in initial_permission_assignments:
if ipa not in desired_permission_assignments:
db.session.delete(ipa)
groups_to_delete = GroupModel.query.filter(
GroupModel.identifier.not_in(desired_group_identifiers)
).all()
for gtd in groups_to_delete:
db.session.delete(gtd)
db.session.commit()
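A sketch of the group_info structure refresh_permissions expects, inferred from the loop above (the group, user, and permission values are illustrative):

    group_info = [
        {
            "name": "fin-team",               # group identifier
            "users": ["alex", "jordan"],      # unknown usernames become waiting assignments
            "permissions": [
                {"uri": "PG:/finance", "actions": ["start"]},
                {"uri": "/tasks/*", "actions": ["create", "read", "update", "delete"]},
            ],
        }
    ]
    AuthorizationService.refresh_permissions(group_info)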
class KeycloakAuthorization:
"""Interface with Keycloak server."""

View File

@@ -4,6 +4,7 @@ from typing import Optional
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService
@@ -22,3 +23,15 @@ class GroupService:
db.session.commit()
UserService.create_principal(group.id, id_column_name="group_id")
return group
@classmethod
def add_user_to_group_or_add_to_waiting(
cls, username: str, group_identifier: str
) -> None:
"""Add_user_to_group_or_add_to_waiting."""
group = cls.find_or_create_group(group_identifier)
user = UserModel.query.filter_by(username=username).first()
if user:
UserService.add_user_to_group(user, group)
else:
UserService.add_waiting_group_assignment(username, group)
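Usage is a one-liner; the names here are illustrative:

    # adds the user immediately if the account exists, otherwise records a waiting assignment
    GroupService.add_user_to_group_or_add_to_waiting("alex", "fin-team")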

View File

@@ -154,6 +154,8 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
"time": time,
"decimal": decimal,
"_strptime": _strptime,
"enumerate": enumerate,
"list": list,
}
# This will overwrite the standard builtins
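With enumerate and list now exposed to the restricted script engine, a BPMN script task can build numbered lists directly; a tiny illustrative snippet (the approvers variable is assumed to already exist in task data):

    numbered = [f"{i + 1}. {name}" for i, name in enumerate(list(approvers))]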
@@ -563,7 +565,7 @@ class ProcessInstanceProcessor:
"spiff_step": self.process_instance_model.spiff_step or 1,
"task_json": task_json,
"timestamp": round(time.time()),
"completed_by_user_id": self.current_user().id,
# "completed_by_user_id": self.current_user().id,
}
def spiff_step_details(self) -> SpiffStepDetailsModel:
@@ -574,14 +576,13 @@ class ProcessInstanceProcessor:
spiff_step=details_mapping["spiff_step"],
task_json=details_mapping["task_json"],
timestamp=details_mapping["timestamp"],
completed_by_user_id=details_mapping["completed_by_user_id"],
# completed_by_user_id=details_mapping["completed_by_user_id"],
)
return details_model
def save_spiff_step_details(self, human_task: HumanTaskModel) -> None:
def save_spiff_step_details(self) -> None:
"""SaveSpiffStepDetails."""
details_model = self.spiff_step_details()
details_model.lane_assignment_id = human_task.lane_assignment_id
db.session.add(details_model)
db.session.commit()
@@ -1215,11 +1216,16 @@ class ProcessInstanceProcessor:
)
return user_tasks # type: ignore
def complete_task(self, task: SpiffTask, human_task: HumanTaskModel) -> None:
def complete_task(
self, task: SpiffTask, human_task: HumanTaskModel, user: UserModel
) -> None:
"""Complete_task."""
self.increment_spiff_step()
self.bpmn_process_instance.complete_task_from_id(task.id)
self.save_spiff_step_details(human_task)
human_task.completed_by_user_id = user.id
db.session.add(human_task)
db.session.commit()
self.save_spiff_step_details()
def get_data(self) -> dict[str, Any]:
"""Get_data."""

View File

@@ -1,14 +1,30 @@
"""Process_instance_report_service."""
import re
from dataclasses import dataclass
from typing import Optional
import sqlalchemy
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from sqlalchemy import and_
from sqlalchemy import func
from sqlalchemy import or_
from sqlalchemy.orm import aliased
from sqlalchemy.orm import selectinload
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@dataclass
@@ -16,14 +32,16 @@ class ProcessInstanceReportFilter:
"""ProcessInstanceReportFilter."""
process_model_identifier: Optional[str] = None
user_group_identifier: Optional[str] = None
start_from: Optional[int] = None
start_to: Optional[int] = None
end_from: Optional[int] = None
end_to: Optional[int] = None
process_status: Optional[list[str]] = None
initiated_by_me: Optional[bool] = None
has_terminal_status: Optional[bool] = None
with_tasks_completed_by_me: Optional[bool] = None
with_tasks_completed_by_my_group: Optional[bool] = None
with_tasks_assigned_to_my_group: Optional[bool] = None
with_relation_to_me: Optional[bool] = None
def to_dict(self) -> dict[str, str]:
@@ -32,6 +50,8 @@ class ProcessInstanceReportFilter:
if self.process_model_identifier is not None:
d["process_model_identifier"] = self.process_model_identifier
if self.user_group_identifier is not None:
d["user_group_identifier"] = self.user_group_identifier
if self.start_from is not None:
d["start_from"] = str(self.start_from)
if self.start_to is not None:
@@ -44,13 +64,15 @@ class ProcessInstanceReportFilter:
d["process_status"] = ",".join(self.process_status)
if self.initiated_by_me is not None:
d["initiated_by_me"] = str(self.initiated_by_me).lower()
if self.has_terminal_status is not None:
d["has_terminal_status"] = str(self.has_terminal_status).lower()
if self.with_tasks_completed_by_me is not None:
d["with_tasks_completed_by_me"] = str(
self.with_tasks_completed_by_me
).lower()
if self.with_tasks_completed_by_my_group is not None:
d["with_tasks_completed_by_my_group"] = str(
self.with_tasks_completed_by_my_group
if self.with_tasks_assigned_to_my_group is not None:
d["with_tasks_assigned_to_my_group"] = str(
self.with_tasks_assigned_to_my_group
).lower()
if self.with_relation_to_me is not None:
d["with_relation_to_me"] = str(self.with_relation_to_me).lower()
@@ -92,7 +114,7 @@ class ProcessInstanceReportService:
"filter_by": [],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_instances_initiated_by_me": {
"system_report_completed_instances_initiated_by_me": {
"columns": [
{"Header": "id", "accessor": "id"},
{
@@ -103,28 +125,32 @@ class ProcessInstanceReportService:
{"Header": "end_in_seconds", "accessor": "end_in_seconds"},
{"Header": "status", "accessor": "status"},
],
"filter_by": [{"field_name": "initiated_by_me", "field_value": True}],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_instances_with_tasks_completed_by_me": {
"columns": cls.builtin_column_options(),
"filter_by": [
{"field_name": "with_tasks_completed_by_me", "field_value": True}
{"field_name": "initiated_by_me", "field_value": True},
{"field_name": "has_terminal_status", "field_value": True},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_instances_with_tasks_completed_by_my_groups": {
"system_report_completed_instances_with_tasks_completed_by_me": {
"columns": cls.builtin_column_options(),
"filter_by": [
{"field_name": "with_tasks_completed_by_me", "field_value": True},
{"field_name": "has_terminal_status", "field_value": True},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_completed_instances_with_tasks_completed_by_my_groups": {
"columns": cls.builtin_column_options(),
"filter_by": [
{
"field_name": "with_tasks_completed_by_my_group",
"field_name": "with_tasks_assigned_to_my_group",
"field_value": True,
}
},
{"field_name": "has_terminal_status", "field_value": True},
],
"order_by": ["-start_in_seconds", "-id"],
},
}
process_instance_report = ProcessInstanceReportModel(
identifier=report_identifier,
created_by_id=user.id,
@@ -167,28 +193,30 @@ class ProcessInstanceReportService:
return filters[key].split(",") if key in filters else None
process_model_identifier = filters.get("process_model_identifier")
user_group_identifier = filters.get("user_group_identifier")
start_from = int_value("start_from")
start_to = int_value("start_to")
end_from = int_value("end_from")
end_to = int_value("end_to")
process_status = list_value("process_status")
initiated_by_me = bool_value("initiated_by_me")
has_terminal_status = bool_value("has_terminal_status")
with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me")
with_tasks_completed_by_my_group = bool_value(
"with_tasks_completed_by_my_group"
)
with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group")
with_relation_to_me = bool_value("with_relation_to_me")
report_filter = ProcessInstanceReportFilter(
process_model_identifier,
user_group_identifier,
start_from,
start_to,
end_from,
end_to,
process_status,
initiated_by_me,
has_terminal_status,
with_tasks_completed_by_me,
with_tasks_completed_by_my_group,
with_tasks_assigned_to_my_group,
with_relation_to_me,
)
@@ -199,14 +227,16 @@ class ProcessInstanceReportService:
cls,
process_instance_report: ProcessInstanceReportModel,
process_model_identifier: Optional[str] = None,
user_group_identifier: Optional[str] = None,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
initiated_by_me: Optional[bool] = None,
has_terminal_status: Optional[bool] = None,
with_tasks_completed_by_me: Optional[bool] = None,
with_tasks_completed_by_my_group: Optional[bool] = None,
with_tasks_assigned_to_my_group: Optional[bool] = None,
with_relation_to_me: Optional[bool] = None,
) -> ProcessInstanceReportFilter:
"""Filter_from_metadata_with_overrides."""
@@ -214,6 +244,8 @@ class ProcessInstanceReportService:
if process_model_identifier is not None:
report_filter.process_model_identifier = process_model_identifier
if user_group_identifier is not None:
report_filter.user_group_identifier = user_group_identifier
if start_from is not None:
report_filter.start_from = start_from
if start_to is not None:
@@ -226,11 +258,13 @@ class ProcessInstanceReportService:
report_filter.process_status = process_status.split(",")
if initiated_by_me is not None:
report_filter.initiated_by_me = initiated_by_me
if has_terminal_status is not None:
report_filter.has_terminal_status = has_terminal_status
if with_tasks_completed_by_me is not None:
report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me
if with_tasks_completed_by_my_group is not None:
report_filter.with_tasks_completed_by_my_group = (
with_tasks_completed_by_my_group
if with_tasks_assigned_to_my_group is not None:
report_filter.with_tasks_assigned_to_my_group = (
with_tasks_assigned_to_my_group
)
if with_relation_to_me is not None:
report_filter.with_relation_to_me = with_relation_to_me
@@ -276,3 +310,207 @@ class ProcessInstanceReportService:
{"Header": "Username", "accessor": "username", "filterable": False},
{"Header": "Status", "accessor": "status", "filterable": False},
]
@classmethod
def run_process_instance_report(
cls,
report_filter: ProcessInstanceReportFilter,
process_instance_report: ProcessInstanceReportModel,
user: UserModel,
page: int = 1,
per_page: int = 100,
) -> dict:
"""Run_process_instance_report."""
process_instance_query = ProcessInstanceModel.query
# Always join that hot user table for good performance at serialization time.
process_instance_query = process_instance_query.options(
selectinload(ProcessInstanceModel.process_initiator)
)
if report_filter.process_model_identifier is not None:
process_model = ProcessModelService.get_process_model(
f"{report_filter.process_model_identifier}",
)
process_instance_query = process_instance_query.filter_by(
process_model_identifier=process_model.id
)
# this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
if (
ProcessInstanceModel.start_in_seconds is None
or ProcessInstanceModel.end_in_seconds is None
):
raise (
ApiError(
error_code="unexpected_condition",
message="Something went very wrong",
status_code=500,
)
)
if report_filter.start_from is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.start_in_seconds >= report_filter.start_from
)
if report_filter.start_to is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.start_in_seconds <= report_filter.start_to
)
if report_filter.end_from is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.end_in_seconds >= report_filter.end_from
)
if report_filter.end_to is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.end_in_seconds <= report_filter.end_to
)
if report_filter.process_status is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore
)
if report_filter.initiated_by_me is True:
process_instance_query = process_instance_query.filter_by(
process_initiator=user
)
if report_filter.has_terminal_status is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore
)
if (
not report_filter.with_tasks_completed_by_me
and not report_filter.with_tasks_assigned_to_my_group
and report_filter.with_relation_to_me is True
):
process_instance_query = process_instance_query.outerjoin(
HumanTaskModel
).outerjoin(
HumanTaskUserModel,
and_(
HumanTaskModel.id == HumanTaskUserModel.human_task_id,
HumanTaskUserModel.user_id == user.id,
),
)
process_instance_query = process_instance_query.filter(
or_(
HumanTaskUserModel.id.is_not(None),
ProcessInstanceModel.process_initiator_id == user.id,
)
)
if report_filter.with_tasks_completed_by_me is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.process_initiator_id != user.id
)
process_instance_query = process_instance_query.join(
HumanTaskModel,
and_(
HumanTaskModel.process_instance_id == ProcessInstanceModel.id,
HumanTaskModel.completed_by_user_id == user.id,
),
)
if report_filter.with_tasks_assigned_to_my_group is True:
group_model_join_conditions = [
GroupModel.id == HumanTaskModel.lane_assignment_id
]
if report_filter.user_group_identifier:
group_model_join_conditions.append(
GroupModel.identifier == report_filter.user_group_identifier
)
process_instance_query = process_instance_query.join(HumanTaskModel)
process_instance_query = process_instance_query.join(
GroupModel, and_(*group_model_join_conditions)
)
process_instance_query = process_instance_query.join(
UserGroupAssignmentModel,
UserGroupAssignmentModel.group_id == GroupModel.id,
)
process_instance_query = process_instance_query.filter(
UserGroupAssignmentModel.user_id == user.id
)
instance_metadata_aliases = {}
stock_columns = ProcessInstanceReportService.get_column_names_for_model(
ProcessInstanceModel
)
for column in process_instance_report.report_metadata["columns"]:
if column["accessor"] in stock_columns:
continue
instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
instance_metadata_aliases[column["accessor"]] = instance_metadata_alias
filter_for_column = None
if "filter_by" in process_instance_report.report_metadata:
filter_for_column = next(
(
f
for f in process_instance_report.report_metadata["filter_by"]
if f["field_name"] == column["accessor"]
),
None,
)
isouter = True
conditions = [
ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
instance_metadata_alias.key == column["accessor"],
]
if filter_for_column:
isouter = False
conditions.append(
instance_metadata_alias.value == filter_for_column["field_value"]
)
process_instance_query = process_instance_query.join(
instance_metadata_alias, and_(*conditions), isouter=isouter
).add_columns(
func.max(instance_metadata_alias.value).label(column["accessor"])
)
order_by_query_array = []
order_by_array = process_instance_report.report_metadata["order_by"]
if len(order_by_array) < 1:
order_by_array = ProcessInstanceReportModel.default_order_by()
for order_by_option in order_by_array:
attribute = re.sub("^-", "", order_by_option)
if attribute in stock_columns:
if order_by_option.startswith("-"):
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).desc()
)
else:
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).asc()
)
elif attribute in instance_metadata_aliases:
if order_by_option.startswith("-"):
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).desc()
)
else:
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).asc()
)
# return process_instance_query
process_instances = (
process_instance_query.group_by(ProcessInstanceModel.id)
.add_columns(ProcessInstanceModel.id)
.order_by(*order_by_query_array)
.paginate(page=page, per_page=per_page, error_out=False)
)
results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(
process_instances.items, process_instance_report.report_metadata["columns"]
)
response_json = {
"report": process_instance_report,
"results": results,
"filters": report_filter.to_dict(),
"pagination": {
"count": len(results),
"total": process_instances.total,
"pages": process_instances.pages,
},
}
return response_json
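A hedged sketch of calling the report runner and the shape of what comes back; report_filter, report, and current_user are assumed to already exist:

    result = ProcessInstanceReportService.run_process_instance_report(
        report_filter=report_filter,
        process_instance_report=report,   # a ProcessInstanceReportModel
        user=current_user,
        page=1,
        per_page=25,
    )
    result["results"]      # row dicts with any metadata columns merged in
    result["pagination"]   # {"count": <rows on this page>, "total": ..., "pages": ...}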

View File

@@ -17,6 +17,7 @@ from spiffworkflow_backend.models.task import MultiInstanceType
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
@@ -36,7 +37,10 @@ class ProcessInstanceService:
user: UserModel,
) -> ProcessInstanceModel:
"""Get_process_instance_from_spec."""
current_git_revision = GitService.get_current_revision()
try:
current_git_revision = GitService.get_current_revision()
except GitCommandError:
current_git_revision = ""
process_instance_model = ProcessInstanceModel(
status=ProcessInstanceStatus.not_started.value,
process_initiator=user,
@@ -210,7 +214,7 @@ class ProcessInstanceService:
dot_dct = ProcessInstanceService.create_dot_dict(data)
spiff_task.update_data(dot_dct)
# ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store.
processor.complete_task(spiff_task, human_task)
processor.complete_task(spiff_task, human_task, user=user)
processor.do_engine_steps(save=True)
@staticmethod

View File

@@ -224,10 +224,10 @@ class ProcessModelService(FileSystemService):
new_process_model_list = []
for process_model in process_models:
uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}"
result = AuthorizationService.user_has_permission(
has_permission = AuthorizationService.user_has_permission(
user=user, permission="create", target_uri=uri
)
if result:
if has_permission:
new_process_model_list.append(process_model)
return new_process_model_list

View File

@@ -13,6 +13,9 @@ from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.models.user_group_assignment_waiting import (
UserGroupAssignmentWaitingModel,
)
class UserService:
@@ -21,11 +24,11 @@ class UserService:
@classmethod
def create_user(
cls,
username: str,
service: str,
service_id: str,
name: Optional[str] = "",
username: Optional[str] = "",
email: Optional[str] = "",
display_name: Optional[str] = "",
) -> UserModel:
"""Create_user."""
user_model: Optional[UserModel] = (
@@ -41,8 +44,8 @@ class UserService:
username=username,
service=service,
service_id=service_id,
name=name,
email=email,
display_name=display_name,
)
db.session.add(user_model)
@@ -55,6 +58,7 @@ class UserService:
message=f"Could not add user {username}",
) from e
cls.create_principal(user_model.id)
UserService().apply_waiting_group_assignments(user_model)
return user_model
else:
@@ -69,45 +73,12 @@ class UserService:
)
)
@classmethod
def find_or_create_user(
cls,
service: str,
service_id: str,
name: Optional[str] = None,
username: Optional[str] = None,
email: Optional[str] = None,
) -> UserModel:
"""Find_or_create_user."""
user_model: UserModel
try:
user_model = cls.create_user(
service=service,
service_id=service_id,
name=name,
username=username,
email=email,
)
except ApiError:
user_model = (
UserModel.query.filter(UserModel.service == service)
.filter(UserModel.service_id == service_id)
.first()
)
return user_model
# Returns true if the current user is logged in.
@staticmethod
def has_user() -> bool:
"""Has_user."""
return "token" in g and bool(g.token) and "user" in g and bool(g.user)
# Returns true if the given user uid is different from the current user's uid.
@staticmethod
def is_different_user(uid: str) -> bool:
"""Is_different_user."""
return UserService.has_user() and uid is not None and uid is not g.user.uid
@staticmethod
def current_user() -> Any:
"""Current_user."""
@@ -117,20 +88,6 @@ class UserService:
)
return g.user
@staticmethod
def in_list(uids: list[str]) -> bool:
"""Returns true if the current user's id is in the given list of ids.
False if there is no user, or the user is not in the list.
"""
if (
UserService.has_user()
): # If someone is logged in, lock tasks that don't belong to them.
user = UserService.current_user()
if user.uid in uids:
return True
return False
@staticmethod
def get_principal_by_user_id(user_id: int) -> PrincipalModel:
"""Get_principal_by_user_id."""
@@ -173,8 +130,57 @@ class UserService:
@classmethod
def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None:
"""Add_user_to_group."""
ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
db.session.add(ugam)
exists = (
UserGroupAssignmentModel()
.query.filter_by(user_id=user.id)
.filter_by(group_id=group.id)
.count()
)
if not exists:
ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
db.session.add(ugam)
db.session.commit()
@classmethod
def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None:
"""Add_waiting_group_assignment."""
wugam = (
UserGroupAssignmentWaitingModel()
.query.filter_by(username=username)
.filter_by(group_id=group.id)
.first()
)
if not wugam:
wugam = UserGroupAssignmentWaitingModel(
username=username, group_id=group.id
)
db.session.add(wugam)
db.session.commit()
if wugam.is_match_all():
for user in UserModel.query.all():
cls.add_user_to_group(user, group)
@classmethod
def apply_waiting_group_assignments(cls, user: UserModel) -> None:
"""Apply_waiting_group_assignments."""
waiting = (
UserGroupAssignmentWaitingModel()
.query.filter(UserGroupAssignmentWaitingModel.username == user.username)
.all()
)
for assignment in waiting:
cls.add_user_to_group(user, assignment.group)
db.session.delete(assignment)
wildcard = (
UserGroupAssignmentWaitingModel()
.query.filter(
UserGroupAssignmentWaitingModel.username
== UserGroupAssignmentWaitingModel.MATCH_ALL_USERS
)
.all()
)
for assignment in wildcard:
cls.add_user_to_group(user, assignment.group)
db.session.commit()
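How the waiting-assignment pieces fit together, as a sketch with illustrative names; create_user (above) calls apply_waiting_group_assignments, so the pending membership is applied as soon as the account exists:

    group = GroupService.find_or_create_group("fin-team")
    # "alex" has no account yet, so this records a waiting assignment
    UserService.add_waiting_group_assignment("alex", group)
    # later, when the account is created, the waiting assignment is applied
    user = UserService.create_user(username="alex", service="open_id", service_id="alex-sub")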
@staticmethod

View File

@@ -0,0 +1,75 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_hjecbuk" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0hnphp9</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0hnphp9" sourceRef="StartEvent_1" targetRef="Activity_16lbvwu" />
<bpmn:scriptTask id="Activity_16lbvwu">
<bpmn:incoming>Flow_0hnphp9</bpmn:incoming>
<bpmn:outgoing>Flow_0amajxh</bpmn:outgoing>
<bpmn:dataOutputAssociation id="DataOutputAssociation_15x55ya">
<bpmn:targetRef>DataObjectReference_10g8dit</bpmn:targetRef>
</bpmn:dataOutputAssociation>
<bpmn:script>the_data_object_var = 'hey'</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0amajxh" sourceRef="Activity_16lbvwu" targetRef="manual_task" />
<bpmn:endEvent id="Event_0ik0i72">
<bpmn:incoming>Flow_1ifqo6o</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1ifqo6o" sourceRef="manual_task" targetRef="Event_0ik0i72" />
<bpmn:manualTask id="manual_task">
<bpmn:incoming>Flow_0amajxh</bpmn:incoming>
<bpmn:outgoing>Flow_1ifqo6o</bpmn:outgoing>
<bpmn:property id="Property_0a8w16m" name="__targetRef_placeholder" />
<bpmn:dataInputAssociation id="DataInputAssociation_0iqtpwy">
<bpmn:sourceRef>DataObjectReference_10g8dit</bpmn:sourceRef>
<bpmn:targetRef>Property_0a8w16m</bpmn:targetRef>
</bpmn:dataInputAssociation>
</bpmn:manualTask>
<bpmn:dataObjectReference id="DataObjectReference_10g8dit" name="The Data Object Var" dataObjectRef="the_data_object_var" />
<bpmn:dataObject id="the_data_object_var" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_hjecbuk">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0wqvy5h_di" bpmnElement="Activity_16lbvwu">
<dc:Bounds x="290" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0ik0i72_di" bpmnElement="Event_0ik0i72">
<dc:Bounds x="652" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0keslpp_di" bpmnElement="manual_task">
<dc:Bounds x="470" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0hnphp9_di" bpmnElement="Flow_0hnphp9">
<di:waypoint x="215" y="177" />
<di:waypoint x="290" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0amajxh_di" bpmnElement="Flow_0amajxh">
<di:waypoint x="390" y="177" />
<di:waypoint x="470" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ifqo6o_di" bpmnElement="Flow_1ifqo6o">
<di:waypoint x="570" y="177" />
<di:waypoint x="652" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="DataObjectReference_10g8dit_di" bpmnElement="DataObjectReference_10g8dit">
<dc:Bounds x="412" y="275" width="36" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="390" y="332" width="81" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="DataInputAssociation_0iqtpwy_di" bpmnElement="DataInputAssociation_0iqtpwy">
<di:waypoint x="448" y="275" />
<di:waypoint x="491" y="217" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="DataOutputAssociation_15x55ya_di" bpmnElement="DataOutputAssociation_15x55ya">
<di:waypoint x="371" y="217" />
<di:waypoint x="416" y="275" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -0,0 +1,86 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1ny7jp4" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
<bpmn:process id="sample" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_10jwwqy</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_10jwwqy" sourceRef="StartEvent_1" targetRef="script_task_one" />
<bpmn:endEvent id="Event_1qb1u6a">
<bpmn:incoming>Flow_1axnzv6</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="script_task_one" name="My Script" scriptFormat="python">
<bpmn:extensionElements>
<spiffworkflow:unitTests>
<spiffworkflow:unitTest id="ScriptTest_pass">
<spiffworkflow:inputJson>{
"current_user": {
"id": "2",
"username": "ciadmin1"
},
"num": 0
}</spiffworkflow:inputJson>
<spiffworkflow:expectedOutputJson>{
"Mike": "Awesome",
"i": 2,
"current_user": {
"id": "2",
"username": "ciadmin1"
},
"num": 0,
"my_var": "whatwhat",
"person": "Kevin"
}</spiffworkflow:expectedOutputJson>
</spiffworkflow:unitTest>
<spiffworkflow:unitTest id="ScriptTest_fail">
<spiffworkflow:inputJson>{}</spiffworkflow:inputJson>
<spiffworkflow:expectedOutputJson>{}</spiffworkflow:expectedOutputJson>
</spiffworkflow:unitTest>
<spiffworkflow:unitTest id="unit_test_5T42ZRC">
<spiffworkflow:inputJson>{"current_user": {"id": "1", "username": "kb"}}</spiffworkflow:inputJson>
<spiffworkflow:expectedOutputJson>{"Mike": "Awesome", "current_user": {"id": "1", "username": "kb"}, "heyhey": "https://demo.spiffworkflow.org", "i": 2, "members": [], "my_var": "whatwhat", "person": "Kevin"}</spiffworkflow:expectedOutputJson>
</spiffworkflow:unitTest>
</spiffworkflow:unitTests>
</bpmn:extensionElements>
<bpmn:incoming>Flow_10jwwqy</bpmn:incoming>
<bpmn:outgoing>Flow_1utkzvj</bpmn:outgoing>
<bpmn:script>my_var = 'THE VAR'</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1utkzvj" sourceRef="script_task_one" targetRef="script_task_two" />
<bpmn:sequenceFlow id="Flow_1axnzv6" sourceRef="script_task_two" targetRef="Event_1qb1u6a" />
<bpmn:scriptTask id="script_task_two" name="Bad News Bears">
<bpmn:incoming>Flow_1utkzvj</bpmn:incoming>
<bpmn:outgoing>Flow_1axnzv6</bpmn:outgoing>
<bpmn:script>hey</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="sample">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="132" y="102" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1qb1u6a_di" bpmnElement="Event_1qb1u6a">
<dc:Bounds x="612" y="102" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_17ohe7r_di" bpmnElement="script_task_one">
<dc:Bounds x="241" y="80" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_03fze1z_di" bpmnElement="script_task_two">
<dc:Bounds x="420" y="80" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_10jwwqy_di" bpmnElement="Flow_10jwwqy">
<di:waypoint x="168" y="120" />
<di:waypoint x="241" y="120" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1utkzvj_di" bpmnElement="Flow_1utkzvj">
<di:waypoint x="341" y="120" />
<di:waypoint x="420" y="120" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1axnzv6_di" bpmnElement="Flow_1axnzv6">
<di:waypoint x="520" y="120" />
<di:waypoint x="612" y="120" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_02u675m" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_01cweoc</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_01cweoc" sourceRef="StartEvent_1" targetRef="refresh_permission_script" />
<bpmn:endEvent id="Event_11584qn">
<bpmn:incoming>Flow_1xle2yo</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1xle2yo" sourceRef="refresh_permission_script" targetRef="Event_11584qn" />
<bpmn:scriptTask id="refresh_permission_script" name="Add Permission">
<bpmn:incoming>Flow_01cweoc</bpmn:incoming>
<bpmn:outgoing>Flow_1xle2yo</bpmn:outgoing>
<bpmn:script>refresh_permissions([])</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_02u675m">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_11584qn_di" bpmnElement="Event_11584qn">
<dc:Bounds x="432" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ymj79t_di" bpmnElement="refresh_permission_script">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_01cweoc_di" bpmnElement="Flow_01cweoc">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1xle2yo_di" bpmnElement="Flow_1xle2yo">
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -41,7 +41,7 @@ class BaseTest:
if isinstance(user, UserModel):
return user
user = UserService.create_user("internal", username, username=username)
user = UserService.create_user(username, "internal", username)
if isinstance(user, UserModel):
return user
@@ -243,7 +243,7 @@ class BaseTest:
return file
@staticmethod
def create_process_instance_from_process_model_id(
def create_process_instance_from_process_model_id_with_api(
client: FlaskClient,
test_process_model_id: str,
headers: Dict[str, str],
@@ -324,13 +324,9 @@ class BaseTest:
permission_names: Optional[list[str]] = None,
) -> UserModel:
"""Add_permissions_to_user."""
permission_target = PermissionTargetModel.query.filter_by(
uri=target_uri
).first()
if permission_target is None:
permission_target = PermissionTargetModel(uri=target_uri)
db.session.add(permission_target)
db.session.commit()
permission_target = AuthorizationService.find_or_create_permission_target(
target_uri
)
if permission_names is None:
permission_names = [member.name for member in Permission]

View File

@@ -45,7 +45,7 @@ class TestLoggingService(BaseTest):
user=with_super_admin_user,
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None

View File

@@ -38,7 +38,7 @@ class TestNestedGroups(BaseTest):
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
@@ -99,7 +99,7 @@ class TestNestedGroups(BaseTest):
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),

View File

@@ -1,4 +1,7 @@
"""Test_authentication."""
import base64
import jwt
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@@ -44,13 +47,16 @@ class TestFlaskOpenId(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_get_token."""
code = "testadmin1:1234123412341234"
"""It should be possible to get a token."""
code = (
"c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx"
)
backend_basic_auth_string = code
backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": f"Basic {code}",
"Authorization": f"Basic {backend_basic_auth.decode('utf-8')}",
}
data = {
"grant_type": "authorization_code",
@@ -59,3 +65,13 @@ class TestFlaskOpenId(BaseTest):
}
response = client.post("/openid/token", data=data, headers=headers)
assert response
assert response.is_json
assert "access_token" in response.json
assert "id_token" in response.json
assert "refresh_token" in response.json
decoded_token = jwt.decode(
response.json["id_token"], options={"verify_signature": False}
)
assert "iss" in decoded_token
assert "email" in decoded_token

View File

@@ -285,7 +285,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
# create an instance from a model
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
@@ -1073,7 +1073,7 @@ class TestProcessApi(BaseTest):
"""Test_process_instance_create."""
test_process_model_id = "runs_without_input/sample"
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, test_process_model_id, headers
)
assert response.json is not None
@@ -1103,7 +1103,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -1145,7 +1145,7 @@ class TestProcessApi(BaseTest):
self.modify_process_identifier_for_path_param(process_model_identifier)
)
headers = self.logged_in_headers(with_super_admin_user)
create_response = self.create_process_instance_from_process_model_id(
create_response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert create_response.json is not None
@@ -1192,7 +1192,7 @@ class TestProcessApi(BaseTest):
self.modify_process_identifier_for_path_param(process_model_identifier)
)
headers = self.logged_in_headers(with_super_admin_user)
create_response = self.create_process_instance_from_process_model_id(
create_response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert create_response.json is not None
@@ -1300,7 +1300,7 @@ class TestProcessApi(BaseTest):
"andThis": "another_item_non_key",
}
}
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
@@ -1360,7 +1360,7 @@ class TestProcessApi(BaseTest):
bpmn_file_location=bpmn_file_location,
)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
@@ -1408,7 +1408,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -1449,7 +1449,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -1500,7 +1500,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
self.create_process_instance_from_process_model_id(
self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
@@ -1547,19 +1547,19 @@ class TestProcessApi(BaseTest):
bpmn_file_location=bpmn_file_location,
)
headers = self.logged_in_headers(with_super_admin_user)
self.create_process_instance_from_process_model_id(
self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
self.create_process_instance_from_process_model_id(
self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
self.create_process_instance_from_process_model_id(
self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
self.create_process_instance_from_process_model_id(
self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
self.create_process_instance_from_process_model_id(
self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
@@ -1873,7 +1873,7 @@ class TestProcessApi(BaseTest):
) -> Any:
"""Setup_testing_instance."""
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_id, headers
)
process_instance = response.json
@@ -2042,6 +2042,36 @@ class TestProcessApi(BaseTest):
assert process is not None
assert process.status == "error"
def test_task_data_is_set_even_if_process_instance_errors(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_task_data_is_set_even_if_process_instance_errors."""
process_model = load_test_spec(
process_model_id="group/error_with_task_data",
bpmn_file_name="script_error_with_task_data.bpmn",
process_model_source_directory="error",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
response = client.post(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 400
assert process_instance.status == "error"
processor = ProcessInstanceProcessor(process_instance)
spiff_task = processor.get_task_by_bpmn_identifier(
"script_task_one", processor.bpmn_process_instance
)
assert spiff_task is not None
assert spiff_task.data != {}
def test_process_model_file_create(
self,
app: Flask,
@@ -2196,7 +2226,7 @@ class TestProcessApi(BaseTest):
# process_group_id="finance",
# )
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client,
# process_model.process_group_id,
process_model_identifier,
@@ -2405,7 +2435,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -2542,7 +2572,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
process_instance_id = response.json["id"]
@@ -2611,7 +2641,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
process_instance_id = response.json["id"]
@@ -3070,3 +3100,31 @@ class TestProcessApi(BaseTest):
assert len(response.json["results"]) == 2
assert response.json["results"][1]["id"] == process_instance_one.id
assert response.json["results"][0]["id"] == process_instance_two.id
def test_process_data_show(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_process_data_show."""
process_model = load_test_spec(
"test_group/data_object_test",
process_model_source_directory="data_object_test",
)
process_instance_one = self.create_process_instance_from_process_model(
process_model
)
processor = ProcessInstanceProcessor(process_instance_one)
processor.do_engine_steps(save=True)
assert process_instance_one.status == "user_input_required"
response = client.get(
f"/v1.0/process-data/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance_one.id}/the_data_object_var",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
assert response.json["process_data_value"] == "hey"

View File

@@ -0,0 +1,60 @@
"""Test_get_localtime."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.scripts.get_all_permissions import GetAllPermissions
from spiffworkflow_backend.services.authorization_service import AuthorizationService
class TestGetAllPermissions(BaseTest):
"""TestGetAllPermissions."""
def test_can_get_all_permissions(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_all_permissions."""
self.find_or_create_user("test_user")
# now that we have everything, try to clear it out...
script_attributes_context = ScriptAttributesContext(
task=None,
environment_identifier="testing",
process_instance_id=1,
process_model_identifier="my_test_user",
)
AuthorizationService.add_permission_from_uri_or_macro(
permission="start", target="PG:hey:group", group_identifier="my_test_group"
)
AuthorizationService.add_permission_from_uri_or_macro(
permission="all", target="/tasks", group_identifier="my_test_group"
)
expected_permissions = [
{
"group_identifier": "my_test_group",
"uri": "/process-instances/hey:group:*",
"permissions": ["create"],
},
{
"group_identifier": "my_test_group",
"uri": "/process-instances/for-me/hey:group:*",
"permissions": ["read"],
},
{
"group_identifier": "my_test_group",
"uri": "/tasks",
"permissions": ["create", "read", "update", "delete"],
},
]
permissions = GetAllPermissions().run(script_attributes_context)
assert permissions == expected_permissions

View File

@@ -68,7 +68,7 @@ class TestGetLocaltime(BaseTest):
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
human_task = process_instance.human_tasks[0]
human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
human_task.task_name, processor.bpmn_process_instance
)
@@ -81,7 +81,7 @@ class TestGetLocaltime(BaseTest):
human_task,
)
human_task = process_instance.human_tasks[0]
human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
human_task.task_name, processor.bpmn_process_instance
)

View File

@@ -0,0 +1,50 @@
"""Test_get_localtime."""
import pytest
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
class TestRefreshPermissions(BaseTest):
"""TestRefreshPermissions."""
def test_refresh_permissions_requires_elevated_permission(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_refresh_permissions_requires_elevated_permission."""
basic_user = self.find_or_create_user("basic_user")
privileged_user = self.find_or_create_user("privileged_user")
self.add_permissions_to_user(
privileged_user,
target_uri="/can-run-privileged-script/refresh_permissions",
permission_names=["create"],
)
process_model = load_test_spec(
process_model_id="refresh_permissions",
process_model_source_directory="script_refresh_permissions",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=basic_user
)
processor = ProcessInstanceProcessor(process_instance)
with pytest.raises(ApiError) as exception:
processor.do_engine_steps(save=True)
assert "ScriptUnauthorizedForUserError" in str(exception)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=privileged_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
assert process_instance.status == "complete"

View File

@@ -24,7 +24,6 @@ class TestSaveProcessInstanceMetadata(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_can_save_process_instance_metadata."""
initiator_user = self.find_or_create_user("initiator_user")
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
@@ -34,7 +33,7 @@ class TestSaveProcessInstanceMetadata(BaseTest):
process_model_source_directory="save_process_instance_metadata",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
process_model=process_model, user=with_super_admin_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)

View File

@@ -4,9 +4,12 @@ from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import InvalidPermissionError
from spiffworkflow_backend.services.group_service import GroupService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -14,6 +17,7 @@ from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService
class TestAuthorizationService(BaseTest):
@@ -121,7 +125,7 @@ class TestAuthorizationService(BaseTest):
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
human_task = process_instance.human_tasks[0]
human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
human_task.task_name, processor.bpmn_process_instance
)
@@ -129,13 +133,286 @@ class TestAuthorizationService(BaseTest):
processor, spiff_task, {}, initiator_user, human_task
)
human_task = process_instance.human_tasks[0]
human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
human_task.task_name, processor.bpmn_process_instance
)
finance_user = AuthorizationService.create_user_from_sign_in(
{"username": "testuser2", "sub": "open_id"}
{
"username": "testuser2",
"sub": "testuser2",
"iss": "https://test.stuff",
"email": "testuser2",
}
)
ProcessInstanceService.complete_form_task(
processor, spiff_task, {}, finance_user, human_task
)
def test_explode_permissions_all_on_process_group(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_all_on_process_group."""
expected_permissions = [
("/logs/some-process-group:some-process-model:*", "read"),
("/process-data/some-process-group:some-process-model:*", "read"),
("/process-groups/some-process-group:some-process-model:*", "create"),
("/process-groups/some-process-group:some-process-model:*", "delete"),
("/process-groups/some-process-group:some-process-model:*", "read"),
("/process-groups/some-process-group:some-process-model:*", "update"),
(
"/process-instance-suspend/some-process-group:some-process-model:*",
"create",
),
(
"/process-instance-terminate/some-process-group:some-process-model:*",
"create",
),
("/process-instances/some-process-group:some-process-model:*", "create"),
("/process-instances/some-process-group:some-process-model:*", "delete"),
("/process-instances/some-process-group:some-process-model:*", "read"),
("/process-models/some-process-group:some-process-model:*", "create"),
("/process-models/some-process-group:some-process-model:*", "delete"),
("/process-models/some-process-group:some-process-model:*", "read"),
("/process-models/some-process-group:some-process-model:*", "update"),
("/task-data/some-process-group:some-process-model:*", "read"),
("/task-data/some-process-group:some-process-model:*", "update"),
]
permissions_to_assign = AuthorizationService.explode_permissions(
"all", "PG:/some-process-group/some-process-model"
)
permissions_to_assign_tuples = sorted(
[(p.target_uri, p.permission) for p in permissions_to_assign]
)
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_start_on_process_group(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_start_on_process_group."""
expected_permissions = [
(
"/process-instances/for-me/some-process-group:some-process-model:*",
"read",
),
("/process-instances/some-process-group:some-process-model:*", "create"),
]
permissions_to_assign = AuthorizationService.explode_permissions(
"start", "PG:/some-process-group/some-process-model"
)
permissions_to_assign_tuples = sorted(
[(p.target_uri, p.permission) for p in permissions_to_assign]
)
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_all_on_process_model(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_all_on_process_model."""
expected_permissions = [
("/logs/some-process-group:some-process-model/*", "read"),
("/process-data/some-process-group:some-process-model/*", "read"),
(
"/process-instance-suspend/some-process-group:some-process-model/*",
"create",
),
(
"/process-instance-terminate/some-process-group:some-process-model/*",
"create",
),
("/process-instances/some-process-group:some-process-model/*", "create"),
("/process-instances/some-process-group:some-process-model/*", "delete"),
("/process-instances/some-process-group:some-process-model/*", "read"),
("/process-models/some-process-group:some-process-model/*", "create"),
("/process-models/some-process-group:some-process-model/*", "delete"),
("/process-models/some-process-group:some-process-model/*", "read"),
("/process-models/some-process-group:some-process-model/*", "update"),
("/task-data/some-process-group:some-process-model/*", "read"),
("/task-data/some-process-group:some-process-model/*", "update"),
]
permissions_to_assign = AuthorizationService.explode_permissions(
"all", "PM:/some-process-group/some-process-model"
)
permissions_to_assign_tuples = sorted(
[(p.target_uri, p.permission) for p in permissions_to_assign]
)
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_start_on_process_model(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_start_on_process_model."""
expected_permissions = [
(
"/process-instances/for-me/some-process-group:some-process-model/*",
"read",
),
("/process-instances/some-process-group:some-process-model/*", "create"),
]
permissions_to_assign = AuthorizationService.explode_permissions(
"start", "PM:/some-process-group/some-process-model"
)
permissions_to_assign_tuples = sorted(
[(p.target_uri, p.permission) for p in permissions_to_assign]
)
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_basic(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_basic."""
expected_permissions = [
("/process-instances/for-me", "read"),
("/process-instances/reports/*", "create"),
("/process-instances/reports/*", "delete"),
("/process-instances/reports/*", "read"),
("/process-instances/reports/*", "update"),
("/processes", "read"),
("/service-tasks", "read"),
("/tasks/*", "create"),
("/tasks/*", "delete"),
("/tasks/*", "read"),
("/tasks/*", "update"),
("/user-groups/for-current-user", "read"),
]
permissions_to_assign = AuthorizationService.explode_permissions("all", "BASIC")
permissions_to_assign_tuples = sorted(
[(p.target_uri, p.permission) for p in permissions_to_assign]
)
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_all(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_all."""
expected_permissions = [
("/*", "create"),
("/*", "delete"),
("/*", "read"),
("/*", "update"),
]
permissions_to_assign = AuthorizationService.explode_permissions("all", "ALL")
permissions_to_assign_tuples = sorted(
[(p.target_uri, p.permission) for p in permissions_to_assign]
)
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_with_target_uri(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_with_target_uri."""
expected_permissions = [
("/hey/model", "create"),
("/hey/model", "delete"),
("/hey/model", "read"),
("/hey/model", "update"),
]
permissions_to_assign = AuthorizationService.explode_permissions(
"all", "/hey/model"
)
permissions_to_assign_tuples = sorted(
[(p.target_uri, p.permission) for p in permissions_to_assign]
)
assert permissions_to_assign_tuples == expected_permissions
def test_granting_access_to_group_gives_access_to_group_and_subgroups(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_granting_access_to_group_gives_access_to_group_and_subgroups."""
user = self.find_or_create_user(username="user_one")
user_group = GroupService.find_or_create_group("group_one")
UserService.add_user_to_group(user, user_group)
AuthorizationService.add_permission_from_uri_or_macro(
user_group.identifier, "read", "PG:hey"
)
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
def test_explode_permissions_with_invalid_target_uri(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_with_invalid_target_uri."""
with pytest.raises(InvalidPermissionError):
AuthorizationService.explode_permissions("all", "BAD_MACRO")
def test_explode_permissions_with_start_to_incorrect_target(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_with_start_to_incorrect_target."""
with pytest.raises(InvalidPermissionError):
AuthorizationService.explode_permissions("start", "/hey/model")
def test_can_refresh_permissions(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_can_refresh_permissions."""
user = self.find_or_create_user(username="user_one")
admin_user = self.find_or_create_user(username="testadmin1")
# this group is not mentioned in the group_info below, so it will get deleted
GroupService.find_or_create_group("group_two")
assert GroupModel.query.filter_by(identifier="group_two").first() is not None
group_info = [
{
"users": ["user_one"],
"name": "group_one",
"permissions": [{"actions": ["create", "read"], "uri": "PG:hey"}],
}
]
AuthorizationService.refresh_permissions(group_info)
assert GroupModel.query.filter_by(identifier="group_two").first() is None
assert GroupModel.query.filter_by(identifier="group_one").first() is not None
self.assert_user_has_permission(admin_user, "create", "/anything-they-want")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
self.assert_user_has_permission(user, "create", "/v1.0/process-groups/hey:yo")
group_info = [
{
"users": ["user_one"],
"name": "group_one",
"permissions": [{"actions": ["read"], "uri": "PG:hey"}],
}
]
AuthorizationService.refresh_permissions(group_info)
assert GroupModel.query.filter_by(identifier="group_one").first() is not None
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
self.assert_user_has_permission(
user, "create", "/v1.0/process-groups/hey:yo", expected_result=False
)
self.assert_user_has_permission(admin_user, "create", "/anything-they-want")

View File

@ -37,7 +37,7 @@ class TestDotNotation(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id(
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
process_instance_id = response.json["id"]

View File

@ -31,10 +31,14 @@ class TestProcessInstanceProcessor(BaseTest):
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_script_engine_takes_data_and_returns_expected_results."""
app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
script_engine = ProcessInstanceProcessor._script_engine
result = script_engine._evaluate("a", {"a": 1})
assert result == 1
app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
app.config["THREAD_LOCAL_DATA"].process_instance_id = None
def test_script_engine_can_use_custom_scripts(
self,
@ -42,12 +46,16 @@ class TestProcessInstanceProcessor(BaseTest):
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_script_engine_takes_data_and_returns_expected_results."""
app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
script_engine = ProcessInstanceProcessor._script_engine
result = script_engine._evaluate("fact_service(type='norris')", {})
assert (
result
== "Chuck Norris doesnt read books. He stares them down until he gets the information he wants."
)
app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
app.config["THREAD_LOCAL_DATA"].process_instance_id = None
def test_sets_permission_correctly_on_human_task(
self,
@ -80,8 +88,8 @@ class TestProcessInstanceProcessor(BaseTest):
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
assert human_task.lane_assignment_id is None
assert len(human_task.potential_owners) == 1
assert human_task.potential_owners[0] == initiator_user
@ -97,8 +105,8 @@ class TestProcessInstanceProcessor(BaseTest):
processor, spiff_task, {}, initiator_user, human_task
)
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
assert human_task.lane_assignment_id == finance_group.id
assert len(human_task.potential_owners) == 1
assert human_task.potential_owners[0] == finance_user
@ -114,8 +122,8 @@ class TestProcessInstanceProcessor(BaseTest):
ProcessInstanceService.complete_form_task(
processor, spiff_task, {}, finance_user, human_task
)
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
assert human_task.lane_assignment_id is None
assert len(human_task.potential_owners) == 1
assert human_task.potential_owners[0] == initiator_user
@ -163,8 +171,8 @@ class TestProcessInstanceProcessor(BaseTest):
processor.do_engine_steps(save=True)
processor.save()
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
assert human_task.lane_assignment_id is None
assert len(human_task.potential_owners) == 1
assert human_task.potential_owners[0] == initiator_user
@ -179,9 +187,10 @@ class TestProcessInstanceProcessor(BaseTest):
ProcessInstanceService.complete_form_task(
processor, spiff_task, {}, initiator_user, human_task
)
assert human_task.completed_by_user_id == initiator_user.id
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
assert human_task.lane_assignment_id is None
assert len(human_task.potential_owners) == 2
assert human_task.potential_owners == [finance_user_three, finance_user_four]
@ -198,8 +207,9 @@ class TestProcessInstanceProcessor(BaseTest):
ProcessInstanceService.complete_form_task(
processor, spiff_task, {}, finance_user_three, human_task
)
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert human_task.completed_by_user_id == finance_user_three.id
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
assert human_task.lane_assignment_id is None
assert len(human_task.potential_owners) == 1
assert human_task.potential_owners[0] == finance_user_four
@ -215,8 +225,9 @@ class TestProcessInstanceProcessor(BaseTest):
ProcessInstanceService.complete_form_task(
processor, spiff_task, {}, finance_user_four, human_task
)
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert human_task.completed_by_user_id == finance_user_four.id
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
assert human_task.lane_assignment_id is None
assert len(human_task.potential_owners) == 1
assert human_task.potential_owners[0] == initiator_user
@ -228,8 +239,8 @@ class TestProcessInstanceProcessor(BaseTest):
processor, spiff_task, {}, initiator_user, human_task
)
assert len(process_instance.human_tasks) == 1
human_task = process_instance.human_tasks[0]
assert len(process_instance.active_human_tasks) == 1
human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
human_task.task_name, processor.bpmn_process_instance
)
@ -250,7 +261,7 @@ class TestProcessInstanceProcessor(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_sets_permission_correctly_on_human_task_when_using_dict."""
"""Test_does_not_recreate_human_tasks_on_multiple_saves."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
@ -273,11 +284,11 @@ class TestProcessInstanceProcessor(BaseTest):
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
assert len(process_instance.human_tasks) == 1
initial_human_task_id = process_instance.human_tasks[0].id
assert len(process_instance.active_human_tasks) == 1
initial_human_task_id = process_instance.active_human_tasks[0].id
# save again to ensure we attempt to process the human tasks again
processor.save()
assert len(process_instance.human_tasks) == 1
assert initial_human_task_id == process_instance.human_tasks[0].id
assert len(process_instance.active_human_tasks) == 1
assert initial_human_task_id == process_instance.active_human_tasks[0].id

View File

@ -3,8 +3,12 @@ from typing import Optional
from flask import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@ -15,6 +19,7 @@ from spiffworkflow_backend.services.process_instance_report_service import (
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportService,
)
from spiffworkflow_backend.services.user_service import UserService
class TestProcessInstanceReportFilter(BaseTest):
@ -122,13 +127,13 @@ class TestProcessInstanceReportService(BaseTest):
report_metadata=report_metadata,
)
return ProcessInstanceReportService.filter_from_metadata_with_overrides(
report,
process_model_identifier,
start_from,
start_to,
end_from,
end_to,
process_status,
process_instance_report=report,
process_model_identifier=process_model_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
)
def _filter_by_dict_from_metadata(self, report_metadata: dict) -> dict[str, str]:
@ -743,3 +748,383 @@ class TestProcessInstanceReportService(BaseTest):
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["sue"]
def test_can_filter_by_completed_instances_initiated_by_me(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_can_filter_by_completed_instances_initiated_by_me."""
process_model_id = "runs_without_input/sample"
bpmn_file_location = "sample"
process_model = load_test_spec(
process_model_id,
process_model_source_directory=bpmn_file_location,
)
user_one = self.find_or_create_user(username="user_one")
user_two = self.find_or_create_user(username="user_two")
# Create several process instances to ensure they are not returned in the result
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
self.create_process_instance_from_process_model(
process_model=process_model, status="waiting", user=user_one
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
process_instance_report = ProcessInstanceReportService.report_with_identifier(
user=user_one,
report_identifier="system_report_completed_instances_initiated_by_me",
)
report_filter = (
ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
)
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
user=user_one,
)
assert len(response_json["results"]) == 2
assert response_json["results"][0]["process_initiator_id"] == user_one.id
assert response_json["results"][1]["process_initiator_id"] == user_one.id
assert response_json["results"][0]["status"] == "complete"
assert response_json["results"][1]["status"] == "complete"
def test_can_filter_by_completed_instances_with_tasks_completed_by_me(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_can_filter_by_completed_instances_with_tasks_completed_by_me."""
process_model_id = "runs_without_input/sample"
bpmn_file_location = "sample"
process_model = load_test_spec(
process_model_id,
process_model_source_directory=bpmn_file_location,
)
user_one = self.find_or_create_user(username="user_one")
user_two = self.find_or_create_user(username="user_two")
# Create several process instances to ensure they are not returned in the result
process_instance_created_by_user_one_one = (
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
process_instance_created_by_user_one_three = (
self.create_process_instance_from_process_model(
process_model=process_model, status="waiting", user=user_one
)
)
process_instance_created_by_user_two_one = (
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
self.create_process_instance_from_process_model(
process_model=process_model, status="waiting", user=user_two
)
human_task_for_user_one_one = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_one.id,
completed_by_user_id=user_one.id,
)
human_task_for_user_one_two = HumanTaskModel(
process_instance_id=process_instance_created_by_user_two_one.id,
completed_by_user_id=user_one.id,
)
human_task_for_user_one_three = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_three.id,
completed_by_user_id=user_one.id,
)
human_task_for_user_two_one = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_one.id,
completed_by_user_id=user_two.id,
)
human_task_for_user_two_two = HumanTaskModel(
process_instance_id=process_instance_created_by_user_two_one.id,
completed_by_user_id=user_two.id,
)
human_task_for_user_two_three = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_three.id,
completed_by_user_id=user_two.id,
)
db.session.add(human_task_for_user_one_one)
db.session.add(human_task_for_user_one_two)
db.session.add(human_task_for_user_one_three)
db.session.add(human_task_for_user_two_one)
db.session.add(human_task_for_user_two_two)
db.session.add(human_task_for_user_two_three)
db.session.commit()
process_instance_report = ProcessInstanceReportService.report_with_identifier(
user=user_one,
report_identifier="system_report_completed_instances_with_tasks_completed_by_me",
)
report_filter = (
ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
)
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
user=user_one,
)
assert len(response_json["results"]) == 1
assert response_json["results"][0]["process_initiator_id"] == user_two.id
assert (
response_json["results"][0]["id"]
== process_instance_created_by_user_two_one.id
)
assert response_json["results"][0]["status"] == "complete"
def test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups."""
process_model_id = "runs_without_input/sample"
bpmn_file_location = "sample"
process_model = load_test_spec(
process_model_id,
process_model_source_directory=bpmn_file_location,
)
user_group_one = GroupModel(identifier="group_one")
user_group_two = GroupModel(identifier="group_two")
db.session.add(user_group_one)
db.session.add(user_group_two)
db.session.commit()
user_one = self.find_or_create_user(username="user_one")
user_two = self.find_or_create_user(username="user_two")
user_three = self.find_or_create_user(username="user_three")
UserService.add_user_to_group(user_one, user_group_one)
UserService.add_user_to_group(user_two, user_group_one)
UserService.add_user_to_group(user_three, user_group_two)
# Create several process instances to ensure they are not returned in the result
process_instance_created_by_user_one_one = (
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
process_instance_created_by_user_one_three = (
self.create_process_instance_from_process_model(
process_model=process_model, status="waiting", user=user_one
)
)
process_instance_created_by_user_two_one = (
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
self.create_process_instance_from_process_model(
process_model=process_model, status="waiting", user=user_two
)
human_task_for_user_group_one_one = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_one.id,
lane_assignment_id=user_group_one.id,
)
human_task_for_user_group_one_two = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_three.id,
lane_assignment_id=user_group_one.id,
)
human_task_for_user_group_one_three = HumanTaskModel(
process_instance_id=process_instance_created_by_user_two_one.id,
lane_assignment_id=user_group_one.id,
)
human_task_for_user_group_two_one = HumanTaskModel(
process_instance_id=process_instance_created_by_user_two_one.id,
lane_assignment_id=user_group_two.id,
)
human_task_for_user_group_two_two = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_one.id,
lane_assignment_id=user_group_two.id,
)
db.session.add(human_task_for_user_group_one_one)
db.session.add(human_task_for_user_group_one_two)
db.session.add(human_task_for_user_group_one_three)
db.session.add(human_task_for_user_group_two_one)
db.session.add(human_task_for_user_group_two_two)
db.session.commit()
process_instance_report = ProcessInstanceReportService.report_with_identifier(
user=user_one,
report_identifier="system_report_completed_instances_with_tasks_completed_by_my_groups",
)
report_filter = (
ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
)
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
user=user_one,
)
assert len(response_json["results"]) == 2
assert response_json["results"][0]["process_initiator_id"] == user_two.id
assert (
response_json["results"][0]["id"]
== process_instance_created_by_user_two_one.id
)
assert response_json["results"][0]["status"] == "complete"
assert response_json["results"][1]["process_initiator_id"] == user_one.id
assert (
response_json["results"][1]["id"]
== process_instance_created_by_user_one_one.id
)
assert response_json["results"][1]["status"] == "complete"
def test_can_filter_by_with_relation_to_me(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_can_filter_by_with_relation_to_me."""
process_model_id = "runs_without_input/sample"
bpmn_file_location = "sample"
process_model = load_test_spec(
process_model_id,
process_model_source_directory=bpmn_file_location,
)
user_group_one = GroupModel(identifier="group_one")
user_group_two = GroupModel(identifier="group_two")
db.session.add(user_group_one)
db.session.add(user_group_two)
db.session.commit()
user_one = self.find_or_create_user(username="user_one")
user_two = self.find_or_create_user(username="user_two")
user_three = self.find_or_create_user(username="user_three")
UserService.add_user_to_group(user_one, user_group_one)
UserService.add_user_to_group(user_two, user_group_one)
UserService.add_user_to_group(user_three, user_group_two)
# Create several process instances to ensure they are not returned in the result
process_instance_created_by_user_one_one = (
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
)
process_instance_created_by_user_one_two = (
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_one
)
)
process_instance_created_by_user_one_three = (
self.create_process_instance_from_process_model(
process_model=process_model, status="waiting", user=user_one
)
)
process_instance_created_by_user_two_one = (
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
)
self.create_process_instance_from_process_model(
process_model=process_model, status="complete", user=user_two
)
self.create_process_instance_from_process_model(
process_model=process_model, status="waiting", user=user_two
)
human_task_for_user_group_one_one = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_one.id,
lane_assignment_id=user_group_one.id,
)
human_task_for_user_group_one_two = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_three.id,
lane_assignment_id=user_group_one.id,
)
human_task_for_user_group_one_three = HumanTaskModel(
process_instance_id=process_instance_created_by_user_two_one.id,
lane_assignment_id=user_group_one.id,
)
human_task_for_user_group_two_one = HumanTaskModel(
process_instance_id=process_instance_created_by_user_two_one.id,
lane_assignment_id=user_group_two.id,
)
human_task_for_user_group_two_two = HumanTaskModel(
process_instance_id=process_instance_created_by_user_one_one.id,
lane_assignment_id=user_group_two.id,
)
db.session.add(human_task_for_user_group_one_one)
db.session.add(human_task_for_user_group_one_two)
db.session.add(human_task_for_user_group_one_three)
db.session.add(human_task_for_user_group_two_one)
db.session.add(human_task_for_user_group_two_two)
db.session.commit()
UserService.add_user_to_human_tasks_if_appropriate(user_one)
process_instance_report = ProcessInstanceReportService.report_with_identifier(
user=user_one
)
report_filter = (
ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
with_relation_to_me=True,
)
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
user=user_one,
)
assert len(response_json["results"]) == 4
process_instance_ids_in_results = [r["id"] for r in response_json["results"]]
assert (
process_instance_created_by_user_one_one.id
in process_instance_ids_in_results
)
assert (
process_instance_created_by_user_one_two.id
in process_instance_ids_in_results
)
assert (
process_instance_created_by_user_one_three.id
in process_instance_ids_in_results
)
assert (
process_instance_created_by_user_two_one.id
in process_instance_ids_in_results
)

View File

@ -0,0 +1,54 @@
"""Process Model."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.user_group_assignment_waiting import (
UserGroupAssignmentWaitingModel,
)
from spiffworkflow_backend.services.group_service import GroupService
from spiffworkflow_backend.services.user_service import UserService
class TestUserService(BaseTest):
"""TestUserService."""
def test_assigning_a_group_to_a_user_before_the_user_is_created(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_waiting_group_assignments."""
a_test_group = GroupService.find_or_create_group("aTestGroup")
UserService.add_waiting_group_assignment("initiator_user", a_test_group)
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.groups[0] == a_test_group
def test_assigning_a_group_to_all_users_updates_new_users(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_waiting_group_assignments."""
everybody_group = GroupService.find_or_create_group("everybodyGroup")
UserService.add_waiting_group_assignment(
UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group
)
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.groups[0] == everybody_group
def test_assigning_a_group_to_all_users_updates_existing_users(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_waiting_group_assignments."""
initiator_user = self.find_or_create_user("initiator_user")
everybody_group = GroupService.find_or_create_group("everybodyGroup")
UserService.add_waiting_group_assignment(
UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group
)
assert initiator_user.groups[0] == everybody_group

View File

@ -1,12 +1,41 @@
/* eslint-disable */
const { defineConfig } = require('cypress');
const { rm } = require('fs/promises')
// only use video compression in CI, where we will set the env var so we upload to the Cypress dashboard
const useVideoCompression = !!process.env.CYPRESS_RECORD_KEY
// https://github.com/cypress-io/cypress/issues/2522
const deleteVideosOnSuccess = (on) => {
const filesToDelete = []
on('after:spec', (_spec, results) => {
if (results.stats.failures === 0 && results.video) {
filesToDelete.push(results.video)
}
})
on('after:run', async () => {
if (filesToDelete.length) {
console.log(
'after:run hook: Deleting %d video(s) from successful specs',
filesToDelete.length
)
await Promise.all(filesToDelete.map((videoFile) => rm(videoFile)))
}
})
}
module.exports = defineConfig({
projectId: 'crax1q',
// since it's slow
videoCompression: useVideoCompression,
videoUploadOnPasses: false,
chromeWebSecurity: false,
e2e: {
baseUrl: 'http://localhost:7001',
setupNodeEvents(_on, config) {
setupNodeEvents(on, config) {
deleteVideosOnSuccess(on)
require('@cypress/grep/src/plugin')(config);
return config;
},

View File

@ -30,7 +30,10 @@ describe('process-groups', () => {
.find('.cds--btn--danger')
.click();
cy.url().should('include', `process-groups`);
cy.contains(groupId).should('not.exist');
cy.contains(newGroupDisplayName).should('not.exist');
// meaning the process group list page is loaded, so we can sign out safely without worrying about ajax requests failing
cy.get('.tile-process-group-content-container').should('exist');
});
// the process groups page no longer has pagination post-tiles

View File

@ -68,8 +68,7 @@ describe('process-instances', () => {
cy.login();
cy.navigateToProcessModel(
'Acceptance Tests Group One',
'Acceptance Tests Model 1',
'acceptance-tests-model-1'
'Acceptance Tests Model 1'
);
});
afterEach(() => {
@ -80,6 +79,7 @@ describe('process-instances', () => {
const originalDmnOutputForKevin = 'Very wonderful';
const newDmnOutputForKevin = 'The new wonderful';
const dmnOutputForDan = 'pretty wonderful';
const acceptanceTestOneDisplayName = 'Acceptance Tests Model 1';
const originalPythonScript = 'person = "Kevin"';
const newPythonScript = 'person = "Dan"';
@ -95,13 +95,13 @@ describe('process-instances', () => {
cy.getBySel(`edit-file-${dmnFile.replace('.', '-')}`).click();
updateDmnText(originalDmnOutputForKevin, newDmnOutputForKevin);
cy.contains('acceptance-tests-model-1').click();
cy.contains(acceptanceTestOneDisplayName).click();
cy.runPrimaryBpmnFile();
cy.getBySel('files-accordion').click();
cy.getBySel(`edit-file-${dmnFile.replace('.', '-')}`).click();
updateDmnText(newDmnOutputForKevin, originalDmnOutputForKevin);
cy.contains('acceptance-tests-model-1').click();
cy.contains(acceptanceTestOneDisplayName).click();
cy.runPrimaryBpmnFile();
// Change bpmn
@ -109,13 +109,13 @@ describe('process-instances', () => {
cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click();
cy.contains(`Process Model File: ${bpmnFile}`);
updateBpmnPythonScript(newPythonScript);
cy.contains('acceptance-tests-model-1').click();
cy.contains(acceptanceTestOneDisplayName).click();
cy.runPrimaryBpmnFile();
cy.getBySel('files-accordion').click();
cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click();
updateBpmnPythonScript(originalPythonScript);
cy.contains('acceptance-tests-model-1').click();
cy.contains(acceptanceTestOneDisplayName).click();
cy.runPrimaryBpmnFile();
});
@ -160,6 +160,7 @@ describe('process-instances', () => {
cy.getBySel('process-instance-list-link').click();
cy.getBySel('process-instance-show-link').first().click();
cy.getBySel('process-instance-log-list-link').click();
cy.getBySel('process-instance-log-detailed').click();
cy.contains('process_model_one');
cy.contains('State change to COMPLETED');
cy.basicPaginationTest();
@ -167,6 +168,8 @@ describe('process-instances', () => {
it('can filter', () => {
cy.getBySel('process-instance-list-link').click();
cy.getBySel('process-instance-list-all').click();
cy.contains('All Process Instances');
cy.assertAtLeastOneItemInPaginatedResults();
const statusSelect = '#process-instance-status-select';
@ -174,6 +177,7 @@ describe('process-instances', () => {
if (!['all', 'waiting'].includes(processStatus)) {
cy.get(statusSelect).click();
cy.get(statusSelect).contains(processStatus).click();
cy.get(statusSelect).click();
cy.getBySel('filter-button').click();
// FIXME: wait a little bit for the useEffects to be able to fully set processInstanceFilters
cy.wait(1000);

View File

@ -13,11 +13,10 @@ const checkTaskHasClass = (taskName, className) => {
cy.get(`g[data-element-id=${taskName}]`).should('have.class', className);
};
const kickOffModelWithForm = (modelId, formName) => {
const kickOffModelWithForm = () => {
cy.navigateToProcessModel(
'Acceptance Tests Group One',
'Acceptance Tests Model 2',
'acceptance-tests-model-2'
'Acceptance Tests Model 2'
);
cy.runPrimaryBpmnFile(true);
};
@ -32,12 +31,11 @@ describe('tasks', () => {
it('can complete and navigate a form', () => {
const groupDisplayName = 'Acceptance Tests Group One';
const modelId = `acceptance-tests-model-2`;
const modelDisplayName = `Acceptance Tests Model 2`;
const completedTaskClassName = 'completed-task-highlight';
const activeTaskClassName = 'active-task-highlight';
cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId);
cy.navigateToProcessModel(groupDisplayName, modelDisplayName);
cy.runPrimaryBpmnFile(true);
submitInputIntoFormField(
@ -71,7 +69,7 @@ describe('tasks', () => {
);
cy.contains('Task: get_user_generated_number_four');
cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId);
cy.navigateToProcessModel(groupDisplayName, modelDisplayName);
cy.getBySel('process-instance-list-link').click();
cy.assertAtLeastOneItemInPaginatedResults();
@ -94,7 +92,7 @@ describe('tasks', () => {
cy.contains('Tasks').should('exist');
// FIXME: this will probably need a better way to link to the proper form that we want
cy.contains('Complete Task').click();
cy.contains('Go').click();
submitInputIntoFormField(
'get_user_generated_number_four',
@ -103,7 +101,7 @@ describe('tasks', () => {
);
cy.url().should('include', '/tasks');
cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId);
cy.navigateToProcessModel(groupDisplayName, modelDisplayName);
cy.getBySel('process-instance-list-link').click();
cy.assertAtLeastOneItemInPaginatedResults();

View File

@ -95,14 +95,16 @@ Cypress.Commands.add(
} else {
cy.contains(/Process Instance.*[kK]icked [oO]ff/);
cy.reload(true);
cy.contains('Process Model:').should('exist');
cy.contains(/Process Instance.*[kK]icked [oO]ff/).should('not.exist');
cy.contains('[data-qa=process-model-show-permissions-loaded]', 'true');
}
}
);
Cypress.Commands.add(
'navigateToProcessModel',
(groupDisplayName, modelDisplayName, modelIdentifier) => {
(groupDisplayName, modelDisplayName) => {
cy.navigateToAdmin();
cy.contains(miscDisplayName).click();
cy.contains(`Process Group: ${miscDisplayName}`, { timeout: 10000 });
@ -120,10 +122,23 @@ Cypress.Commands.add('basicPaginationTest', () => {
// NOTE: this is an em dash instead of an en dash
cy.contains(/\b1—2 of \d+/);
cy.get('.cds--pagination__button--forward').click();
cy.contains(/\b3—4 of \d+/);
cy.get('.cds--pagination__button--backward').click();
cy.contains(/\b1—2 of \d+/);
// Ensure that everything is loaded before we leave this function and try to sign out.
// Just showing "results 1-2 of n" is not good enough, since the ajax request may not
// have finished yet. To be sure it is finished, grab the entity id from page 1 and
// remember it, then use cy.contains, which waits for the element to exist AND for
// that element to contain the text we're looking for.
cy.getBySel('paginated-entity-id')
.first()
.then(($element) => {
const oldId = $element.text().trim();
cy.get('.cds--pagination__button--forward').click();
cy.contains(/\b3—4 of \d+/);
cy.get('.cds--pagination__button--backward').click();
cy.contains(/\b1—2 of \d+/);
cy.contains('[data-qa=paginated-entity-id]', oldId);
});
});
Cypress.Commands.add('assertAtLeastOneItemInPaginatedResults', () => {

View File

@ -7,7 +7,7 @@
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Frontend for managing and running business processes with spiffworkflow"
content="A turnkey solution for building and executing the workflows that drive your business"
/>
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<!--
@ -24,7 +24,7 @@
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>spiffworkflow-frontend</title>
<title>SpiffWorkflow</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>

View File

@ -14,15 +14,14 @@ import { ErrorForDisplay } from './interfaces';
import { AbilityContext } from './contexts/Can';
import UserService from './services/UserService';
import { Notification } from './components/Notification';
export default function App() {
const [errorMessage, setErrorMessage] = useState<ErrorForDisplay | null>(
null
);
const [errorObject, setErrorObject] = useState<ErrorForDisplay | null>(null);
const errorContextValueArray = useMemo(
() => [errorMessage, setErrorMessage],
[errorMessage]
() => [errorObject, setErrorObject],
[errorObject]
);
if (!UserService.isLoggedIn()) {
@ -33,25 +32,48 @@ export default function App() {
const ability = defineAbility(() => {});
let errorTag = null;
if (errorMessage) {
if (errorObject) {
let sentryLinkTag = null;
if (errorMessage.sentry_link) {
if (errorObject.sentry_link) {
sentryLinkTag = (
<span>
{
': Find details about this error here (it may take a moment to become available): '
}
<a href={errorMessage.sentry_link} target="_blank" rel="noreferrer">
{errorMessage.sentry_link}
<a href={errorObject.sentry_link} target="_blank" rel="noreferrer">
{errorObject.sentry_link}
</a>
</span>
);
}
let message = <div>{errorObject.message}</div>;
let title = 'Error:';
if ('task_name' in errorObject) {
title = `Error in python script:`;
message = (
<>
<br />
<div>
Task: {errorObject.task_name} ({errorObject.task_id})
</div>
<div>File name: {errorObject.file_name}</div>
<div>Line number in script task: {errorObject.line_number}</div>
<br />
<div>{errorObject.message}</div>
</>
);
}
errorTag = (
<div id="filter-errors" className="mt-4 alert alert-danger" role="alert">
{errorMessage.message}
<Notification
title={title}
onClose={() => setErrorObject(null)}
type="error"
>
{message}
{sentryLinkTag}
</div>
</Notification>
);
}

View File

@ -8,7 +8,7 @@ export default function MyCompletedInstances() {
filtersEnabled={false}
paginationQueryParamPrefix={paginationQueryParamPrefix}
perPageOptions={[2, 5, 25]}
reportIdentifier="system_report_instances_initiated_by_me"
reportIdentifier="system_report_completed_instances_initiated_by_me"
showReports={false}
/>
);

View File

@ -1,6 +1,10 @@
import React from 'react';
// @ts-ignore
import { Close, CheckmarkFilled } from '@carbon/icons-react';
import {
Close,
Checkmark,
Error,
// @ts-ignore
} from '@carbon/icons-react';
// @ts-ignore
import { Button } from '@carbon/react';
@ -17,9 +21,9 @@ export function Notification({
onClose,
type = 'success',
}: OwnProps) {
let iconClassName = 'green-icon';
let iconComponent = <Checkmark className="notification-icon" />;
if (type === 'error') {
iconClassName = 'red-icon';
iconComponent = <Error className="notification-icon" />;
}
return (
<div
@ -28,7 +32,7 @@ export function Notification({
>
<div className="cds--inline-notification__details">
<div className="cds--inline-notification__text-wrapper">
<CheckmarkFilled className={`${iconClassName} notification-icon`} />
{iconComponent}
<div className="cds--inline-notification__title">{title}</div>
<div className="cds--inline-notification__subtitle">{children}</div>
</div>

View File

@ -0,0 +1,29 @@
import { ProcessInstanceReport } from '../interfaces';
import HttpService from '../services/HttpService';
import ButtonWithConfirmation from './ButtonWithConfirmation';
type OwnProps = {
onSuccess: (..._args: any[]) => any;
processInstanceReportSelection: ProcessInstanceReport;
};
export default function ProcessInstanceListDeleteReport({
onSuccess,
processInstanceReportSelection,
}: OwnProps) {
const deleteProcessInstanceReport = () => {
HttpService.makeCallToBackend({
path: `/process-instances/reports/${processInstanceReportSelection.id}`,
successCallback: onSuccess,
httpMethod: 'DELETE',
});
};
return (
<ButtonWithConfirmation
description={`Delete Perspective ${processInstanceReportSelection.identifier}?`}
onConfirmation={deleteProcessInstanceReport}
buttonLabel="Delete"
/>
);
}

View File

@ -40,6 +40,7 @@ import {
getProcessModelFullIdentifierFromSearchParams,
modifyProcessIdentifierForPathParam,
refreshAtInterval,
setErrorMessageSafely,
} from '../helpers';
import PaginationForTable from './PaginationForTable';
@ -62,6 +63,7 @@ import {
} from '../interfaces';
import ProcessModelSearch from './ProcessModelSearch';
import ProcessInstanceReportSearch from './ProcessInstanceReportSearch';
import ProcessInstanceListDeleteReport from './ProcessInstanceListDeleteReport';
import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport';
import { FormatProcessModelDisplayName } from './MiniComponents';
import { Notification } from './Notification';
@ -130,11 +132,11 @@ export default function ProcessInstanceListTable({
const [endFromTimeInvalid, setEndFromTimeInvalid] = useState<boolean>(false);
const [endToTimeInvalid, setEndToTimeInvalid] = useState<boolean>(false);
const setErrorMessage = (useContext as any)(ErrorContext)[1];
const [errorObject, setErrorObject] = (useContext as any)(ErrorContext);
const processInstancePathPrefix =
variant === 'all'
? '/admin/process-instances'
? '/admin/process-instances/all'
: '/admin/process-instances/for-me';
const [processStatusAllOptions, setProcessStatusAllOptions] = useState<any[]>(
@ -428,8 +430,11 @@ export default function ProcessInstanceListTable({
}
};
// TODO: after factoring this out page hangs when invalid date ranges and applying the filter
const calculateStartAndEndSeconds = () => {
// jasquat/burnettk - 2022-12-28: do not check the validity of the dates when rendering
// components, to avoid re-rendering the page while the user is still typing. NOTE that we
// also prevent re-rendering with the setErrorMessageSafely function. We are not sure why
// calling the context's setter still re-renders even when the value has not changed, which
// is why that extra check exists.
const calculateStartAndEndSeconds = (validate: boolean = true) => {
const startFromSeconds = convertDateAndTimeStringsToSeconds(
startFromDate,
startFromTime || '00:00:00'
@ -447,29 +452,25 @@ export default function ProcessInstanceListTable({
endToTime || '00:00:00'
);
let valid = true;
if (isTrueComparison(startFromSeconds, '>', startToSeconds)) {
setErrorMessage({
message: '"Start date from" cannot be after "start date to"',
});
valid = false;
}
if (isTrueComparison(endFromSeconds, '>', endToSeconds)) {
setErrorMessage({
message: '"End date from" cannot be after "end date to"',
});
valid = false;
}
if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) {
setErrorMessage({
message: '"Start date from" cannot be after "end date from"',
});
valid = false;
}
if (isTrueComparison(startToSeconds, '>', endToSeconds)) {
setErrorMessage({
message: '"Start date to" cannot be after "end date to"',
});
valid = false;
if (validate) {
let message = '';
if (isTrueComparison(startFromSeconds, '>', startToSeconds)) {
message = '"Start date from" cannot be after "start date to"';
}
if (isTrueComparison(endFromSeconds, '>', endToSeconds)) {
message = '"End date from" cannot be after "end date to"';
}
if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) {
message = '"Start date from" cannot be after "end date from"';
}
if (isTrueComparison(startToSeconds, '>', endToSeconds)) {
message = '"Start date to" cannot be after "end date to"';
}
if (message !== '') {
valid = false;
setErrorMessageSafely(message, errorObject, setErrorObject);
}
}
return {
@ -526,7 +527,7 @@ export default function ProcessInstanceListTable({
queryParamString += `&report_id=${processInstanceReportSelection.id}`;
}
setErrorMessage(null);
setErrorObject(null);
setProcessInstanceReportJustSaved(null);
navigate(`${processInstancePathPrefix}?${queryParamString}`);
};
@ -625,7 +626,7 @@ export default function ProcessInstanceListTable({
queryParamString = `?report_id=${selectedReport.id}`;
}
setErrorMessage(null);
setErrorObject(null);
setProcessInstanceReportJustSaved(mode || null);
navigate(`${processInstancePathPrefix}${queryParamString}`);
};
@ -657,7 +658,7 @@ export default function ProcessInstanceListTable({
startToSeconds,
endFromSeconds,
endToSeconds,
} = calculateStartAndEndSeconds();
} = calculateStartAndEndSeconds(false);
if (!valid || !reportMetadata) {
return null;
@ -681,6 +682,19 @@ export default function ProcessInstanceListTable({
);
};
const onDeleteReportSuccess = () => {
processInstanceReportDidChange({ selectedItem: null });
};
const deleteReportComponent = () => {
return processInstanceReportSelection ? (
<ProcessInstanceListDeleteReport
onSuccess={onDeleteReportSuccess}
processInstanceReportSelection={processInstanceReportSelection}
/>
) : null;
};
const removeColumn = (reportColumn: ReportColumn) => {
if (reportMetadata) {
const reportMetadataCopy = { ...reportMetadata };
@ -1062,6 +1076,7 @@ export default function ProcessInstanceListTable({
</Column>
<Column sm={4} md={4} lg={8}>
{saveAsReportComponent()}
{deleteReportComponent()}
</Column>
</Grid>
</>
@ -1096,7 +1111,7 @@ export default function ProcessInstanceListTable({
to={`${processInstancePathPrefix}/${modifiedProcessModelId}/${id}`}
title={`View process instance ${id}`}
>
{id}
<span data-qa="paginated-entity-id">{id}</span>
</Link>
);
};

View File

@ -78,7 +78,7 @@ export default function ProcessInstanceRun({
checkPermissions = true,
}: OwnProps) {
const navigate = useNavigate();
const setErrorMessage = (useContext as any)(ErrorContext)[1];
const setErrorObject = (useContext as any)(ErrorContext)[1];
const modifiedProcessModelId = modifyProcessIdentifierForPathParam(
processModel.id
);
@ -105,12 +105,12 @@ export default function ProcessInstanceRun({
};
const processModelRun = (processInstance: any) => {
setErrorMessage(null);
setErrorObject(null);
storeRecentProcessModelInLocalStorage(processModel);
HttpService.makeCallToBackend({
path: `/process-instances/${modifiedProcessModelId}/${processInstance.id}/run`,
successCallback: onProcessInstanceRun,
failureCallback: setErrorMessage,
failureCallback: setErrorObject,
httpMethod: 'POST',
});
};

View File

@ -11,6 +11,7 @@ import {
truncateString,
} from '../helpers';
import ProcessInstanceRun from './ProcessInstanceRun';
import { Notification } from './Notification';
type OwnProps = {
headerElement?: ReactElement;
@ -50,20 +51,19 @@ export default function ProcessModelListTiles({
const processInstanceRunResultTag = () => {
if (processInstance) {
return (
<div className="alert alert-success" role="alert">
<p>
Process Instance {processInstance.id} kicked off (
<Link
to={`/admin/process-instances/${modifyProcessIdentifierForPathParam(
processInstance.process_model_identifier
)}/${processInstance.id}`}
data-qa="process-instance-show-link"
>
view
</Link>
).
</p>
</div>
<Notification
title={`Process Instance ${processInstance.id} kicked off`}
onClose={() => setProcessInstance(null)}
>
<Link
to={`/admin/process-instances/${modifyProcessIdentifierForPathParam(
processInstance.process_model_identifier
)}/${processInstance.id}`}
data-qa="process-instance-show-link"
>
view
</Link>
</Notification>
);
}
return null;

View File

@ -94,7 +94,7 @@ export default function TaskListTable({
<td>
<Link
data-qa="process-instance-show-link"
to={`/admin/process-instances/${modifiedProcessModelIdentifier}/${rowToUse.process_instance_id}`}
to={`/admin/process-instances/for-me/${modifiedProcessModelIdentifier}/${rowToUse.process_instance_id}`}
title={`View process instance ${rowToUse.process_instance_id}`}
>
{rowToUse.process_instance_id}

View File

@ -21,7 +21,7 @@ export default function TasksWaitingForMyGroups() {
return (
<TaskListTable
apiPath="/tasks/for-my-groups"
additionalParams={`group_identifier=${userGroup}`}
additionalParams={`user_group_identifier=${userGroup}`}
paginationQueryParamPrefix={`group-tasks-${userGroup}`}
tableTitle={`Tasks waiting for group: ${userGroup}`}
tableDescription={`This is a list of tasks for the ${userGroup} group. They can be completed by any member of the group.`}

View File

@ -1,11 +1,23 @@
const host = window.location.hostname;
let hostAndPort = `api.${host}`;
const { port, hostname } = window.location;
let hostAndPort = `api.${hostname}`;
let protocol = 'https';
if (/^\d+\./.test(host) || host === 'localhost') {
hostAndPort = `${host}:7000`;
if (/^\d+\./.test(hostname) || hostname === 'localhost') {
let serverPort = 7000;
if (!Number.isNaN(Number(port))) {
serverPort = Number(port) - 1;
}
hostAndPort = `${hostname}:${serverPort}`;
protocol = 'http';
}
export const BACKEND_BASE_URL = `${protocol}://${hostAndPort}/v1.0`;
let url = `${protocol}://${hostAndPort}/v1.0`;
// Allow overriding the backend base url with an environment variable at build time.
if (process.env.REACT_APP_BACKEND_BASE_URL) {
url = process.env.REACT_APP_BACKEND_BASE_URL;
}
export const BACKEND_BASE_URL = url;
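// Illustrative sketch, not part of the diff above: the same derivation pulled into a
// standalone helper so the port arithmetic is easier to follow. The helper name and the
// sample values below are hypothetical.
const deriveBackendBaseUrl = (hostname: string, port: string): string => {
  if (/^\d+\./.test(hostname) || hostname === 'localhost') {
    // Local or bare-IP hosts: assume the backend listens one port below the frontend,
    // falling back to port 7000 when the frontend port is not numeric.
    const serverPort = Number.isNaN(Number(port)) ? 7000 : Number(port) - 1;
    return `http://${hostname}:${serverPort}/v1.0`;
  }
  // Any other hostname: assume an api. subdomain served over https.
  return `https://api.${hostname}/v1.0`;
};
// deriveBackendBaseUrl('localhost', '7001')      => 'http://localhost:7000/v1.0'
// deriveBackendBaseUrl('192.168.1.10', '8001')   => 'http://192.168.1.10:8000/v1.0'
// deriveBackendBaseUrl('spiff.example.com', '')  => 'https://api.spiff.example.com/v1.0'
// When REACT_APP_BACKEND_BASE_URL is set at build time, it overrides the derived value.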
export const PROCESS_STATUSES = [
'not_started',

View File

@ -1,4 +1,8 @@
import { convertSecondsToFormattedDateString, slugifyString } from './helpers';
import {
convertSecondsToFormattedDateString,
slugifyString,
underscorizeString,
} from './helpers';
test('it can slugify a string', () => {
expect(slugifyString('hello---world_ and then Some such-')).toEqual(
@ -6,6 +10,12 @@ test('it can slugify a string', () => {
);
});
test('it can underscorize a string', () => {
expect(underscorizeString('hello---world_ and then Some such-')).toEqual(
'hello_world_and_then_some_such'
);
});
test('it can keep the correct date when converting seconds to date', () => {
const dateString = convertSecondsToFormattedDateString(1666325400);
expect(dateString).toEqual('2022-10-21');

View File

@ -8,6 +8,7 @@ import {
DEFAULT_PER_PAGE,
DEFAULT_PAGE,
} from './components/PaginationForTable';
import { ErrorForDisplay } from './interfaces';
// https://www.30secondsofcode.org/js/s/slugify
export const slugifyString = (str: any) => {
@ -20,6 +21,10 @@ export const slugifyString = (str: any) => {
.replace(/-+$/g, '');
};
export const underscorizeString = (inputString: string) => {
return slugifyString(inputString).replace(/-/g, '_');
};
export const capitalizeFirstLetter = (string: any) => {
return string.charAt(0).toUpperCase() + string.slice(1);
};
@ -234,3 +239,17 @@ export const getBpmnProcessIdentifiers = (rootBpmnElement: any) => {
childProcesses.push(rootBpmnElement.businessObject.id);
return childProcesses;
};
// Setting the error message state to the same string is still considered a change
// and re-renders the page so check the message first to avoid that.
export const setErrorMessageSafely = (
newErrorMessageString: string,
oldErrorMessage: ErrorForDisplay,
errorMessageSetter: any
) => {
if (oldErrorMessage && oldErrorMessage.message === newErrorMessageString) {
return null;
}
errorMessageSetter({ message: newErrorMessageString });
return null;
};
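// A minimal usage sketch for the helper above (hypothetical component code; the real
// call site is the date-range validation in ProcessInstanceListTable shown earlier):
//
//   const [errorObject, setErrorObject] = (useContext as any)(ErrorContext);
//   if (isTrueComparison(startFromSeconds, '>', startToSeconds)) {
//     setErrorMessageSafely(
//       '"Start date from" cannot be after "start date to"',
//       errorObject,
//       setErrorObject
//     );
//   }
//
// On the first failure the setter runs and the error notification renders. On later
// renders, when errorObject.message already equals the new string, the helper returns
// early without calling the setter, so typing in the date fields does not keep
// re-rendering the page.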

View File

@ -5,6 +5,11 @@ export interface Secret {
creator_user_id: string;
}
export interface ProcessData {
process_data_identifier: string;
process_data_value: any;
}
export interface RecentProcessModel {
processGroupIdentifier?: string;
processModelIdentifier: string;
@ -153,6 +158,10 @@ export type HotCrumbItem = HotCrumbItemArray | HotCrumbItemObject;
export interface ErrorForDisplay {
message: string;
sentry_link?: string;
task_name?: string;
task_id?: string;
line_number?: number;
file_name?: string;
}
export interface AuthenticationParam {

View File

@ -25,11 +25,11 @@ import JsonSchemaFormBuilder from './JsonSchemaFormBuilder';
export default function AdminRoutes() {
const location = useLocation();
const setErrorMessage = (useContext as any)(ErrorContext)[1];
const setErrorObject = (useContext as any)(ErrorContext)[1];
useEffect(() => {
setErrorMessage(null);
}, [location, setErrorMessage]);
setErrorObject(null);
}, [location, setErrorObject]);
if (UserService.hasRole(['admin'])) {
return (

View File

@ -7,7 +7,7 @@ import HttpService from '../services/HttpService';
import UserService from '../services/UserService';
export default function AuthenticationList() {
const setErrorMessage = (useContext as any)(ErrorContext)[1];
const setErrorObject = (useContext as any)(ErrorContext)[1];
const [authenticationList, setAuthenticationList] = useState<
AuthenticationItem[] | null
@ -26,9 +26,9 @@ export default function AuthenticationList() {
HttpService.makeCallToBackend({
path: `/authentications`,
successCallback: processResult,
failureCallback: setErrorMessage,
failureCallback: setErrorObject,
});
}, [setErrorMessage]);
}, [setErrorObject]);
const buildTable = () => {
if (authenticationList) {

View File

@ -30,10 +30,10 @@ export default function CompletedInstances() {
paginationQueryParamPrefix="group_completed_instances"
paginationClassName="with-large-bottom-margin"
perPageOptions={[2, 5, 25]}
reportIdentifier="system_report_instances_with_tasks_completed_by_my_groups"
reportIdentifier="system_report_completed_instances_with_tasks_completed_by_my_groups"
showReports={false}
textToShowIfEmpty="This group has no completed instances at this time."
additionalParams={`group_identifier=${userGroup}`}
additionalParams={`user_group_identifier=${userGroup}`}
/>
</>
);
@ -50,7 +50,7 @@ export default function CompletedInstances() {
filtersEnabled={false}
paginationQueryParamPrefix="my_completed_instances"
perPageOptions={[2, 5, 25]}
reportIdentifier="system_report_instances_initiated_by_me"
reportIdentifier="system_report_completed_instances_initiated_by_me"
showReports={false}
textToShowIfEmpty="You have no completed instances at this time."
paginationClassName="with-large-bottom-margin"
@ -64,7 +64,7 @@ export default function CompletedInstances() {
filtersEnabled={false}
paginationQueryParamPrefix="my_completed_tasks"
perPageOptions={[2, 5, 25]}
reportIdentifier="system_report_instances_with_tasks_completed_by_me"
reportIdentifier="system_report_completed_instances_with_tasks_completed_by_me"
showReports={false}
textToShowIfEmpty="You have no completed instances at this time."
paginationClassName="with-large-bottom-margin"

View File

@ -14,7 +14,7 @@ import { usePermissionFetcher } from '../hooks/PermissionService';
export default function Configuration() {
const location = useLocation();
const setErrorMessage = (useContext as any)(ErrorContext)[1];
const setErrorObject = (useContext as any)(ErrorContext)[1];
const [selectedTabIndex, setSelectedTabIndex] = useState<number>(0);
const navigate = useNavigate();
@ -26,13 +26,13 @@ export default function Configuration() {
const { ability } = usePermissionFetcher(permissionRequestData);
useEffect(() => {
setErrorMessage(null);
setErrorObject(null);
let newSelectedTabIndex = 0;
if (location.pathname.match(/^\/admin\/configuration\/authentications\b/)) {
newSelectedTabIndex = 1;
}
setSelectedTabIndex(newSelectedTabIndex);
}, [location, setErrorMessage]);
}, [location, setErrorObject]);
return (
<>

View File

@ -11,12 +11,12 @@ import CreateNewInstance from './CreateNewInstance';
export default function HomePageRoutes() {
const location = useLocation();
const setErrorMessage = (useContext as any)(ErrorContext)[1];
const setErrorObject = (useContext as any)(ErrorContext)[1];
const [selectedTabIndex, setSelectedTabIndex] = useState<number>(0);
const navigate = useNavigate();
useEffect(() => {
setErrorMessage(null);
setErrorObject(null);
let newSelectedTabIndex = 0;
if (location.pathname.match(/^\/tasks\/completed-instances\b/)) {
newSelectedTabIndex = 1;
@ -24,7 +24,7 @@ export default function HomePageRoutes() {
newSelectedTabIndex = 2;
}
setSelectedTabIndex(newSelectedTabIndex);
}, [location, setErrorMessage]);
}, [location, setErrorObject]);
const renderTabs = () => {
if (location.pathname.match(/^\/tasks\/\d+\/\b/)) {

Some files were not shown because too many files have changed in this diff.