merged in main and resolved conflicts w/ burnettk

jasquat 2022-12-16 13:53:43 -05:00
commit bb6e7713f2
49 changed files with 932 additions and 263 deletions

View File

@@ -170,15 +170,17 @@ def set_user_sentry_context() -> None:
 def handle_exception(exception: Exception) -> flask.wrappers.Response:
     """Handles unexpected exceptions."""
     set_user_sentry_context()
-    id = capture_exception(exception)
-    organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG")
-    project_slug = current_app.config.get("SENTRY_PROJECT_SLUG")
     sentry_link = None
-    if organization_slug and project_slug:
-        sentry_link = (
-            f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
-        )
+    if not isinstance(exception, ApiError) or exception.error_code != "invalid_token":
+        id = capture_exception(exception)
+        organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG")
+        project_slug = current_app.config.get("SENTRY_PROJECT_SLUG")
+        if organization_slug and project_slug:
+            sentry_link = (
+                f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
+            )
 
     # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
     # seems to break the sentry sdk context where we no longer get back
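For context, the Sentry link above is only built when both slugs are present in the Flask config. A minimal sketch of how they might be supplied, following the environ.get pattern used in the config files later in this commit (the environment variable names here are assumptions, not part of the commit):

    from os import environ

    # hypothetical config entries; the error handler reads them via current_app.config.get(...)
    SENTRY_ORGANIZATION_SLUG = environ.get("SENTRY_ORGANIZATION_SLUG")
    SENTRY_PROJECT_SLUG = environ.get("SENTRY_PROJECT_SLUG")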

View File

@@ -9,7 +9,7 @@ set -o errtrace -o errexit -o nounset -o pipefail
 if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
-  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
+  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models"
 fi
 
 if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then

View File

@@ -11,11 +11,12 @@ set -o errtrace -o errexit -o nounset -o pipefail
 bpmn_models_absolute_dir="$1"
 git_commit_message="$2"
-git_commit_username="$3"
-git_commit_email="$4"
+git_branch="$3"
+git_commit_username="$4"
+git_commit_email="$5"
 
-if [[ -z "${2:-}" ]]; then
-  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message]"
+if [[ -z "${5:-}" ]]; then
+  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]"
   exit 1
 fi
@@ -26,11 +27,8 @@ git add .
 if [ -z "$(git status --porcelain)" ]; then
   echo "No changes to commit"
 else
-  if [[ -n "$git_commit_username" ]]; then
-    git config --local user.name "$git_commit_username"
-  fi
-  if [[ -n "$git_commit_email" ]]; then
-    git config --local user.email "$git_commit_email"
-  fi
+  git config --local user.name "$git_commit_username"
+  git config --local user.email "$git_commit_email"
   git commit -m "$git_commit_message"
+  git push --set-upstream origin "$git_branch"
 fi
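For reference, the script now expects five positional arguments, matching the shell_command list that GitService.commit builds later in this commit. A hypothetical invocation (the script path and all argument values are illustrative only):

    # hypothetical example; in practice GitService.commit supplies these from app config
    ./bin/git_commit_bpmn_models_repo \
      "/path/to/process-models" \
      "User: jane clicked save for my-group/my-model/diagram.bpmn" \
      "main" \
      "sartography-automated-committer" \
      "sartography-automated-committer@users.noreply.github.com"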

View File

@@ -18,7 +18,19 @@ set -o errtrace -o errexit -o nounset -o pipefail
 if ! docker network inspect spiffworkflow > /dev/null 2>&1; then
   docker network create spiffworkflow
 fi
-docker rm keycloak 2>/dev/null || echo 'no keycloak container found, safe to start new container'
+# https://stackoverflow.com/a/60579344/6090676
+container_name="keycloak"
+if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then
+  echo ":: Found container - $container_name"
+  if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then
+    echo ":: Stopping running container - $container_name"
+    docker stop $container_name
+  fi
+  echo ":: Removing stopped container - $container_name"
+  docker rm $container_name
+fi
+
 docker run \
   -p 7002:8080 \
   -d \

View File

@@ -68,7 +68,7 @@ services:
       - "7000:7000"
     network_mode: host
     volumes:
-      - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
+      - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
       - ./log:/app/log
     healthcheck:
       test: curl localhost:7000/v1.0/status --fail
@@ -82,7 +82,7 @@ services:
     profiles:
       - debug
     volumes:
-      - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
+      - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
       - ./:/app
     command: /app/bin/boot_in_docker_debug_mode

View File

@@ -654,7 +654,7 @@ werkzeug = "*"
 type = "git"
 url = "https://github.com/sartography/flask-bpmn"
 reference = "main"
-resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
+resolved_reference = "0f2d249d0e799bec912d46132e9ef9754fdacbd7"
 
 [[package]]
 name = "Flask-Cors"
@@ -1851,7 +1851,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134"
+resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994"
 
 [[package]]
 name = "SQLAlchemy"
@@ -2563,7 +2563,6 @@ greenlet = [
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
-    {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
     {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
     {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
     {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},
@@ -2572,7 +2571,6 @@ greenlet = [
     {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
     {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
     {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
-    {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
     {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
     {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
     {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},
@@ -2581,7 +2579,6 @@ greenlet = [
     {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
     {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
     {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
-    {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
     {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
     {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
     {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
@@ -2880,7 +2877,10 @@ orjson = [
     {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
     {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
     {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
+    {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
+    {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
     {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
+    {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
     {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
     {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
     {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},
@@ -2989,18 +2989,7 @@ psycopg2 = [
     {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
 ]
 pyasn1 = [
-    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
-    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
-    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
-    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
     {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
-    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
-    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
-    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
-    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
-    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
-    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
-    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
     {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
 ]
 pycodestyle = [

View File

@@ -616,15 +616,9 @@ paths:
             items:
               $ref: "#/components/schemas/Workflow"
 
-  /process-models/{process_group_id}/{process_model_id}/script-unit-tests:
+  /process-models/{modified_process_model_identifier}/script-unit-tests:
     parameters:
-      - name: process_group_id
-        in: path
-        required: true
-        description: The unique id of an existing process group
-        schema:
-          type: string
-      - name: process_model_id
+      - name: modified_process_model_identifier
        in: path
        required: true
        description: The unique id of an existing process model.
@@ -643,15 +637,9 @@ paths:
             schema:
               $ref: "#/components/schemas/Workflow"
 
-  /process-models/{process_group_id}/{process_model_id}/script-unit-tests/run:
+  /process-models/{modified_process_model_identifier}/script-unit-tests/run:
     parameters:
-      - name: process_group_id
-        in: path
-        required: true
-        description: The unique id of an existing process group
-        schema:
-          type: string
-      - name: process_model_id
+      - name: modified_process_model_identifier
        in: path
        required: true
        description: The unique id of an existing process model.
@@ -691,6 +679,53 @@ paths:
             schema:
               $ref: "#/components/schemas/Workflow"
 
+  /process-instances/{modified_process_model_identifier}/{process_instance_id}/task-info:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The unique id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: process_identifier
+        in: query
+        required: false
+        description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
+        schema:
+          type: string
+      - name: all_tasks
+        in: query
+        required: false
+        description: If true, this will return all tasks associated with the process instance and not just user tasks.
+        schema:
+          type: boolean
+      - name: spiff_step
+        in: query
+        required: false
+        description: If set will return the tasks as they were during a specific step of execution.
+        schema:
+          type: integer
+    get:
+      tags:
+        - Process Instances
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data
+      summary: returns the list of all user tasks associated with process instance without the task data
+      responses:
+        "200":
+          description: list of tasks
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/Task"
+
   /process-instances/{modified_process_model_identifier}/{process_instance_id}:
     parameters:
       - name: modified_process_model_identifier
@@ -705,6 +740,12 @@ paths:
         description: The unique id of an existing process instance.
         schema:
           type: integer
+      - name: process_identifier
+        in: query
+        required: false
+        description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
+        schema:
+          type: string
     get:
       tags:
         - Process Instances
@@ -1166,8 +1207,8 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list
-      summary: returns the list of all user tasks associated with process instance
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_with_task_data
+      summary: returns the list of all user tasks associated with process instance with the task data
       responses:
         "200":
           description: list of tasks
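As a usage sketch for the new task-info endpoint documented above (the host, port, token, and identifiers are hypothetical; the /v1.0 prefix and port 7000 follow values used elsewhere in this commit):

    curl -H "Authorization: Bearer $ACCESS_TOKEN" \
      "http://localhost:7000/v1.0/process-instances/my-group:my-model/42/task-info?all_tasks=true&spiff_step=3"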

View File

@@ -17,5 +17,3 @@ GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
 )
 GIT_USERNAME = "sartography-automated-committer"
 GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
-GIT_BRANCH_TO_PUBLISH_TO = "main"
-GIT_BRANCH = "main"

View File

@@ -17,7 +17,6 @@ groups:
         dan,
         mike,
         jason,
-        j,
         jarrad,
         elizabeth,
         jon,
@@ -32,7 +31,6 @@ groups:
         dan,
         mike,
         jason,
-        j,
         amir,
         jarrad,
         elizabeth,
@@ -64,6 +62,12 @@ groups:
         harmeet,
       ]
 
+  admin-ro:
+    users:
+      [
+        j,
+      ]
+
 permissions:
   admin:
     groups: [admin]
@@ -71,6 +75,17 @@ permissions:
     allowed_permissions: [create, read, update, delete]
     uri: /*
 
+  admin-readonly:
+    groups: [admin-ro]
+    users: []
+    allowed_permissions: [read]
+    uri: /*
+
+  admin-process-instances-for-readonly:
+    groups: [admin-ro]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /v1.0/process-instances/*
+
   tasks-crud:
     groups: [everybody]
     users: []
@@ -114,12 +129,12 @@ permissions:
     users: []
     allowed_permissions: [read]
     uri: /v1.0/processes
-
-  task-data-read:
-    groups: [demo]
-    users: []
-    allowed_permissions: [read]
-    uri: /v1.0/task-data/*
+  #
+  # task-data-read:
+  #   groups: [demo]
+  #   users: []
+  #   allowed_permissions: [read]
+  #   uri: /v1.0/task-data/*
 
   manage-procurement-admin:

View File

@@ -0,0 +1,165 @@
default_group: everybody
groups:
admin:
users:
[
admin,
jakub,
kb,
alex,
dan,
mike,
jason,
j,
jarrad,
elizabeth,
jon,
natalia,
]
Finance Team:
users:
[
jakub,
alex,
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
natalia,
sasha,
fin,
fin1,
]
demo:
users:
[
core,
fin,
fin1,
harmeet,
sasha,
manuchehr,
lead,
lead1
]
core-contributor:
users:
[
core,
harmeet,
]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [read]
uri: /*
admin-process-instances:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
# read all for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-models/*
read-all-process-instance:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/processes
manage-procurement-admin-instances:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement:*
manage-procurement-admin-instances-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement/*
manage-procurement-admin-instance-logs:
groups: ["Project Lead"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:*
manage-procurement-admin-instance-logs-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement/*
manage-revenue-streams-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-revenue-streams-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-invoice-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
manage-procurement-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:vendor-lifecycle-management:*
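The new file above follows a simple shape: groups map names to user lists, and each permissions entry binds groups to allowed_permissions on a uri pattern. A hypothetical additional entry, shown only to illustrate the pattern (it is not part of the commit):

    read-all-logs:
      groups: [everybody]
      users: []
      allowed_permissions: [read]
      uri: /v1.0/logs/*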

View File

@@ -148,33 +148,18 @@ permissions:
     allowed_permissions: [create, read, update, delete]
     uri: /v1.0/process-groups/manage-procurement:procurement:*
 
-  manage-revenue-streams-instantiate:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [create]
-    uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
   manage-revenue-streams-instances:
     groups: ["core-contributor", "demo"]
     users: []
     allowed_permissions: [create, read]
     uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
 
-  manage-procurement-invoice-instantiate:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [create]
-    uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:*
   manage-procurement-invoice-instances:
     groups: ["core-contributor", "demo"]
     users: []
     allowed_permissions: [create, read]
     uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
 
-  manage-procurement-instantiate:
-    groups: ["core-contributor", "demo"]
-    users: []
-    allowed_permissions: [create]
-    uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:*
   manage-procurement-instances:
     groups: ["core-contributor", "demo"]
     users: []

View File

@@ -4,3 +4,4 @@ from os import environ
 GIT_BRANCH = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
 GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
 GIT_COMMIT_ON_SAVE = False
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"

View File

@@ -15,6 +15,7 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
 SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
     "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
 )
+GIT_COMMIT_ON_SAVE = False
 
 # NOTE: set this here since nox shoves tests and src code to
 # different places and this allows us to know exactly where we are at the start

View File

@@ -1,4 +1,4 @@
-"""Spiff_step_details."""
+"""Process_instance_metadata."""
 from dataclasses import dataclass
 
 from flask_bpmn.models.db import db

View File

@@ -8,6 +8,10 @@ from marshmallow import INCLUDE
 from sqlalchemy import UniqueConstraint
 
 
+class SpecReferenceNotFoundError(Exception):
+    """SpecReferenceNotFoundError."""
+
+
 @dataclass()
 class SpecReference:
     """File Reference Information.

View File

@@ -8,7 +8,7 @@ from flask_bpmn.models.db import SpiffworkflowBaseDBModel
 
 @dataclass
 class SpiffLoggingModel(SpiffworkflowBaseDBModel):
-    """LoggingModel."""
+    """SpiffLoggingModel."""
 
     __tablename__ = "spiff_logging"
     id: int = db.Column(db.Integer, primary_key=True)

View File

@@ -21,7 +21,7 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
         ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
     )
     spiff_step: int = db.Column(db.Integer, nullable=False)
-    task_json: str = deferred(db.Column(db.JSON, nullable=False))  # type: ignore
+    task_json: dict = deferred(db.Column(db.JSON, nullable=False))  # type: ignore
     timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
     completed_by_user_id: int = db.Column(db.Integer, nullable=True)
     lane_assignment_id: Optional[int] = db.Column(

View File

@@ -108,7 +108,7 @@ class Task:
         multi_instance_type: Union[MultiInstanceType, None] = None,
         multi_instance_count: str = "",
         multi_instance_index: str = "",
-        process_name: str = "",
+        process_identifier: str = "",
         properties: Union[dict, None] = None,
         process_instance_id: Union[int, None] = None,
         process_instance_status: Union[str, None] = None,
@@ -118,6 +118,7 @@ class Task:
         form_schema: Union[str, None] = None,
         form_ui_schema: Union[str, None] = None,
         parent: Optional[str] = None,
+        call_activity_process_identifier: Optional[str] = None,
     ):
         """__init__."""
         self.id = id
@@ -129,6 +130,7 @@ class Task:
         self.documentation = documentation
         self.lane = lane
         self.parent = parent
+        self.call_activity_process_identifier = call_activity_process_identifier
         self.data = data
         if self.data is None:
@@ -151,7 +153,7 @@ class Task:
         self.multi_instance_index = (
             multi_instance_index  # And the index of the currently repeating task.
         )
-        self.process_name = process_name
+        self.process_identifier = process_identifier
         self.properties = properties  # Arbitrary extension properties from BPMN editor.
         if self.properties is None:
@@ -177,7 +179,7 @@ class Task:
             "multi_instance_type": multi_instance_type,
             "multi_instance_count": self.multi_instance_count,
             "multi_instance_index": self.multi_instance_index,
-            "process_name": self.process_name,
+            "process_identifier": self.process_identifier,
             "properties": self.properties,
             "process_instance_id": self.process_instance_id,
             "process_instance_status": self.process_instance_status,
@@ -187,6 +189,7 @@ class Task:
             "form_schema": self.form_schema,
             "form_ui_schema": self.form_ui_schema,
             "parent": self.parent,
+            "call_activity_process_identifier": self.call_activity_process_identifier,
         }
 
     @classmethod
@@ -282,7 +285,7 @@ class TaskSchema(Schema):
             "multi_instance_type",
             "multi_instance_count",
             "multi_instance_index",
-            "process_name",
+            "process_identifier",
             "properties",
             "process_instance_id",
             "form_schema",
@@ -293,7 +296,7 @@ class TaskSchema(Schema):
     documentation = marshmallow.fields.String(required=False, allow_none=True)
     # form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True)
     title = marshmallow.fields.String(required=False, allow_none=True)
-    process_name = marshmallow.fields.String(required=False, allow_none=True)
+    process_identifier = marshmallow.fields.String(required=False, allow_none=True)
     lane = marshmallow.fields.String(required=False, allow_none=True)
 
     @marshmallow.post_load

View File

@@ -1,5 +1,6 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import json
+import os
 import random
 import re
 import string
@@ -66,6 +67,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
 from spiffworkflow_backend.models.secret_model import SecretModel
 from spiffworkflow_backend.models.secret_model import SecretModelSchema
 from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
+from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
 from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
 from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
 from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
@@ -74,6 +76,7 @@ from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignme
 from spiffworkflow_backend.routes.user import verify_token
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
+from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.git_service import GitService
 from spiffworkflow_backend.services.message_service import MessageService
 from spiffworkflow_backend.services.process_instance_processor import (
@@ -167,6 +170,9 @@ def process_group_add(body: dict) -> flask.wrappers.Response:
     """Add_process_group."""
     process_group = ProcessGroup(**body)
     ProcessModelService.add_process_group(process_group)
+    commit_and_push_to_git(
+        f"User: {g.user.username} added process group {process_group.id}"
+    )
     return make_response(jsonify(process_group), 201)
@@ -174,6 +180,9 @@ def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo
     """Process_group_delete."""
     process_group_id = un_modify_modified_process_model_id(modified_process_group_id)
     ProcessModelService().process_group_delete(process_group_id)
+    commit_and_push_to_git(
+        f"User: {g.user.username} deleted process group {process_group_id}"
+    )
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@@ -191,6 +200,9 @@ def process_group_update(
     process_group_id = un_modify_modified_process_model_id(modified_process_group_id)
     process_group = ProcessGroup(id=process_group_id, **body_filtered)
     ProcessModelService.update_process_group(process_group)
+    commit_and_push_to_git(
+        f"User: {g.user.username} updated process group {process_group_id}"
+    )
     return make_response(jsonify(process_group), 200)
@@ -255,7 +267,10 @@ def process_group_move(
     new_process_group = ProcessModelService().process_group_move(
         original_process_group_id, new_location
     )
-    return make_response(jsonify(new_process_group), 201)
+    commit_and_push_to_git(
+        f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
+    )
+    return make_response(jsonify(new_process_group), 200)
 
 
 def process_model_create(
@@ -303,6 +318,9 @@ def process_model_create(
     )
 
     ProcessModelService.add_process_model(process_model_info)
+    commit_and_push_to_git(
+        f"User: {g.user.username} created process model {process_model_info.id}"
+    )
     return Response(
         json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
         status=201,
@@ -316,6 +334,9 @@ def process_model_delete(
     """Process_model_delete."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     ProcessModelService().process_model_delete(process_model_identifier)
+    commit_and_push_to_git(
+        f"User: {g.user.username} deleted process model {process_model_identifier}"
+    )
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@@ -339,6 +360,9 @@ def process_model_update(
     process_model = get_process_model(process_model_identifier)
     ProcessModelService.update_process_model(process_model, body_filtered)
+    commit_and_push_to_git(
+        f"User: {g.user.username} updated process model {process_model_identifier}"
+    )
     return ProcessModelInfoSchema().dump(process_model)
@@ -370,7 +394,10 @@ def process_model_move(
     new_process_model = ProcessModelService().process_model_move(
         original_process_model_id, new_location
     )
-    return make_response(jsonify(new_process_model), 201)
+    commit_and_push_to_git(
+        f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
+    )
+    return make_response(jsonify(new_process_model), 200)
 
 
 def process_model_publish(
@@ -466,14 +493,9 @@ def process_model_file_update(
     )
     SpecFileService.update_file(process_model, file_name, request_file_contents)
 
-    if current_app.config["GIT_COMMIT_ON_SAVE"]:
-        git_output = GitService.commit(
-            message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}"
-        )
-        current_app.logger.info(f"git output: {git_output}")
-    else:
-        current_app.logger.info("Git commit on save is disabled")
+    commit_and_push_to_git(
+        f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}"
+    )
 
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@@ -495,6 +517,9 @@ def process_model_file_delete(
             )
         ) from exception
 
+    commit_and_push_to_git(
+        f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}"
+    )
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@@ -516,6 +541,9 @@ def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response:
     file_contents = SpecFileService.get_data(process_model, file.name)
     file.file_contents = file_contents
     file.process_model_id = process_model.id
+    commit_and_push_to_git(
+        f"User: {g.user.username} added process model file {process_model_identifier}/{file.name}"
+    )
     return Response(
         json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
     )
@@ -1031,11 +1059,11 @@ def process_instance_list(
         elif attribute in instance_metadata_aliases:
             if order_by_option.startswith("-"):
                 order_by_query_array.append(
-                    instance_metadata_aliases[attribute].value.desc()
+                    func.max(instance_metadata_aliases[attribute].value).desc()
                 )
             else:
                 order_by_query_array.append(
-                    instance_metadata_aliases[attribute].value.asc()
+                    func.max(instance_metadata_aliases[attribute].value).asc()
                 )
 
     process_instances = (
@@ -1080,25 +1108,48 @@ def process_instance_report_column_list() -> flask.wrappers.Response:
 def process_instance_show(
-    modified_process_model_identifier: str, process_instance_id: int
+    modified_process_model_identifier: str,
+    process_instance_id: int,
+    process_identifier: Optional[str] = None,
 ) -> flask.wrappers.Response:
     """Create_process_instance."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_instance = find_process_instance_by_id_or_raise(process_instance_id)
     current_version_control_revision = GitService.get_current_revision()
-    process_model = get_process_model(process_model_identifier)
-    if process_model.primary_file_name:
+
+    process_model_with_diagram = None
+    name_of_file_with_diagram = None
+    if process_identifier:
+        spec_reference = SpecReferenceCache.query.filter_by(
+            identifier=process_identifier
+        ).first()
+        if spec_reference is None:
+            raise SpecReferenceNotFoundError(
+                f"Could not find given process identifier in the cache: {process_identifier}"
+            )
+        process_model_with_diagram = ProcessModelService.get_process_model(
+            spec_reference.process_model_id
+        )
+        name_of_file_with_diagram = spec_reference.file_name
+    else:
+        process_model_with_diagram = get_process_model(process_model_identifier)
+        if process_model_with_diagram.primary_file_name:
+            name_of_file_with_diagram = process_model_with_diagram.primary_file_name
+
+    if process_model_with_diagram and name_of_file_with_diagram:
         if (
             process_instance.bpmn_version_control_identifier
             == current_version_control_revision
         ):
             bpmn_xml_file_contents = SpecFileService.get_data(
-                process_model, process_model.primary_file_name
+                process_model_with_diagram, name_of_file_with_diagram
             ).decode("utf-8")
         else:
             bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision(
-                process_model, process_instance.bpmn_version_control_identifier
+                process_model_with_diagram,
+                process_instance.bpmn_version_control_identifier,
+                file_name=name_of_file_with_diagram,
             )
         process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents
@@ -1415,11 +1466,44 @@ def get_tasks(
     return make_response(jsonify(response_json), 200)
 
 
-def process_instance_task_list(
+def process_instance_task_list_without_task_data(
     modified_process_model_identifier: str,
     process_instance_id: int,
     all_tasks: bool = False,
     spiff_step: int = 0,
+) -> flask.wrappers.Response:
+    """Process_instance_task_list_without_task_data."""
+    return process_instance_task_list(
+        modified_process_model_identifier,
+        process_instance_id,
+        all_tasks,
+        spiff_step,
+        get_task_data=False,
+    )
+
+
+def process_instance_task_list_with_task_data(
+    modified_process_model_identifier: str,
+    process_instance_id: int,
+    all_tasks: bool = False,
+    spiff_step: int = 0,
+) -> flask.wrappers.Response:
+    """Process_instance_task_list_with_task_data."""
+    return process_instance_task_list(
+        modified_process_model_identifier,
+        process_instance_id,
+        all_tasks,
+        spiff_step,
+        get_task_data=True,
+    )
+
+
+def process_instance_task_list(
+    _modified_process_model_identifier: str,
+    process_instance_id: int,
+    all_tasks: bool = False,
+    spiff_step: int = 0,
+    get_task_data: bool = False,
 ) -> flask.wrappers.Response:
     """Process_instance_task_list."""
     process_instance = find_process_instance_by_id_or_raise(process_instance_id)
@@ -1435,7 +1519,8 @@ def process_instance_task_list(
         )
 
         if step_detail is not None and process_instance.bpmn_json is not None:
             bpmn_json = json.loads(process_instance.bpmn_json)
-            bpmn_json["tasks"] = step_detail.task_json
+            bpmn_json["tasks"] = step_detail.task_json["tasks"]
+            bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
             process_instance.bpmn_json = json.dumps(bpmn_json)
 
     processor = ProcessInstanceProcessor(process_instance)
@@ -1449,7 +1534,8 @@ def process_instance_task_list(
     tasks = []
     for spiff_task in spiff_tasks:
         task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
-        task.data = spiff_task.data
+        if get_task_data:
+            task.data = spiff_task.data
         tasks.append(task)
 
     return make_response(jsonify(tasks), 200)
@@ -1485,7 +1571,25 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
     task.data = spiff_task.data
     task.process_model_display_name = process_model.display_name
     task.process_model_identifier = process_model.id
     process_model_with_form = process_model
+    refs = SpecFileService.get_references_for_process(process_model_with_form)
+    all_processes = [i.identifier for i in refs]
+    if task.process_identifier not in all_processes:
+        bpmn_file_full_path = (
+            ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
+                task.process_identifier
+            )
+        )
+        relative_path = os.path.relpath(
+            bpmn_file_full_path, start=FileSystemService.root_path()
+        )
+        process_model_relative_path = os.path.dirname(relative_path)
+        process_model_with_form = (
+            ProcessModelService.get_process_model_from_relative_path(
+                process_model_relative_path
+            )
+        )
 
     if task.type == "User Task":
         if not form_schema_file_name:
@@ -1614,7 +1718,7 @@ def task_submit(
 def script_unit_test_create(
-    process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]]
+    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
 ) -> flask.wrappers.Response:
     """Script_unit_test_create."""
     bpmn_task_identifier = _get_required_parameter_or_raise(
@@ -1625,7 +1729,7 @@ def script_unit_test_create(
         "expected_output_json", body
     )
 
-    process_model_identifier = f"{process_group_id}/{process_model_id}"
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = get_process_model(process_model_identifier)
     file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
     if file is None:
@@ -1703,7 +1807,7 @@ def script_unit_test_create(
 def script_unit_test_run(
-    process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]]
+    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
 ) -> flask.wrappers.Response:
     """Script_unit_test_run."""
     # FIXME: We should probably clear this somewhere else but this works
@@ -1899,7 +2003,6 @@ def secret_list(
 def add_secret(body: Dict) -> Response:
     """Add secret."""
     secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
-    assert secret_model  # noqa: S101
     return Response(
         json.dumps(SecretModelSchema().dump(secret_model)),
         status=201,
@@ -2040,3 +2143,12 @@ def update_task_data(
         status=200,
         mimetype="application/json",
     )
+
+
+def commit_and_push_to_git(message: str) -> None:
+    """Commit_and_push_to_git."""
+    if current_app.config["GIT_COMMIT_ON_SAVE"]:
+        git_output = GitService.commit(message=message)
+        current_app.logger.info(f"git output: {git_output}")
+    else:
+        current_app.logger.info("Git commit on save is disabled")

View File

@@ -16,8 +16,9 @@ from flask_bpmn.api.api_error import ApiError
 from werkzeug.wrappers import Response
 
 from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.authentication_service import AuthenticationService
 from spiffworkflow_backend.services.authentication_service import (
-    AuthenticationService,
+    MissingAccessTokenError,
 )
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.user_service import UserService
@@ -268,10 +269,10 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
         code, "/v1.0/login_api_return"
     )
     access_token: str = auth_token_object["access_token"]
-    assert access_token  # noqa: S101
+    if access_token is None:
+        raise MissingAccessTokenError("Cannot find the access token for the request")
 
     return access_token
-    # return redirect("localhost:7000/v1.0/ui")
-    # return {'uid': 'user_1'}
 
 
 def logout(id_token: str, redirect_url: Optional[str]) -> Response:

View File

@@ -16,6 +16,10 @@ from werkzeug.wrappers import Response
 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel
 
 
+class MissingAccessTokenError(Exception):
+    """MissingAccessTokenError."""
+
+
 class AuthenticationProviderTypes(enum.Enum):
     """AuthenticationServiceProviders."""

View File

@@ -46,24 +46,39 @@ class GitService:
     @classmethod
     def get_instance_file_contents_for_revision(
-        cls, process_model: ProcessModelInfo, revision: str
+        cls,
+        process_model: ProcessModelInfo,
+        revision: str,
+        file_name: Optional[str] = None,
     ) -> str:
         """Get_instance_file_contents_for_revision."""
         bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
         process_model_relative_path = FileSystemService.process_model_relative_path(
             process_model
         )
+        file_name_to_use = file_name
+        if file_name_to_use is None:
+            file_name_to_use = process_model.primary_file_name
         with FileSystemService.cd(bpmn_spec_absolute_dir):
             shell_command = [
                 "git",
                 "show",
-                f"{revision}:{process_model_relative_path}/{process_model.primary_file_name}",
+                f"{revision}:{process_model_relative_path}/{file_name_to_use}",
             ]
             return cls.run_shell_command_to_get_stdout(shell_command)
 
     @classmethod
-    def commit(cls, message: str, repo_path: Optional[str] = None) -> str:
+    def commit(
+        cls,
+        message: str,
+        repo_path: Optional[str] = None,
+        branch_name: Optional[str] = None,
+    ) -> str:
         """Commit."""
+        cls.check_for_basic_configs()
+        branch_name_to_use = branch_name
+        if branch_name_to_use is None:
+            branch_name_to_use = current_app.config["GIT_BRANCH"]
         repo_path_to_use = repo_path
         if repo_path is None:
             repo_path_to_use = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
@@ -82,14 +97,25 @@ class GitService:
             shell_command_path,
             repo_path_to_use,
             message,
+            branch_name_to_use,
             git_username,
             git_email,
         ]
         return cls.run_shell_command_to_get_stdout(shell_command)
 
     @classmethod
-    def check_for_configs(cls) -> None:
+    def check_for_basic_configs(cls) -> None:
+        """Check_for_basic_configs."""
+        if current_app.config["GIT_BRANCH"] is None:
+            raise MissingGitConfigsError(
+                "Missing config for GIT_BRANCH. "
+                "This is required for publishing process models"
+            )
+
+    @classmethod
+    def check_for_publish_configs(cls) -> None:
         """Check_for_configs."""
+        cls.check_for_basic_configs()
         if current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] is None:
             raise MissingGitConfigsError(
                 "Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
@@ -142,7 +168,7 @@ class GitService:
     @classmethod
     def handle_web_hook(cls, webhook: dict) -> bool:
         """Handle_web_hook."""
-        cls.check_for_configs()
+        cls.check_for_publish_configs()
 
         if "repository" not in webhook or "clone_url" not in webhook["repository"]:
             raise InvalidGitWebhookBodyError(
@@ -178,7 +204,7 @@ class GitService:
     @classmethod
     def publish(cls, process_model_id: str, branch_to_update: str) -> str:
         """Publish."""
-        cls.check_for_configs()
+        cls.check_for_publish_configs()
         source_process_model_root = FileSystemService.root_path()
         source_process_model_path = os.path.join(
             source_process_model_root, process_model_id
@@ -227,10 +253,7 @@ class GitService:
             f"Request to publish changes to {process_model_id}, "
            f"from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}"
         )
-        cls.commit(commit_message, destination_process_root)
-        cls.run_shell_command(
-            ["git", "push", "--set-upstream", "origin", branch_to_pull_request]
-        )
+        cls.commit(commit_message, destination_process_root, branch_to_pull_request)
 
         # build url for github page to open PR
         git_remote = cls.run_shell_command_to_get_stdout(

View File

@ -551,7 +551,7 @@ class ProcessInstanceProcessor:
"""SaveSpiffStepDetails.""" """SaveSpiffStepDetails."""
bpmn_json = self.serialize() bpmn_json = self.serialize()
wf_json = json.loads(bpmn_json) wf_json = json.loads(bpmn_json)
task_json = wf_json["tasks"] task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]}
return { return {
"process_instance_id": self.process_instance_model.id, "process_instance_id": self.process_instance_model.id,

View File

@ -302,6 +302,11 @@ class ProcessInstanceService:
else: else:
lane = None lane = None
if hasattr(spiff_task.task_spec, "spec"):
call_activity_process_identifier = spiff_task.task_spec.spec
else:
call_activity_process_identifier = None
parent_id = None parent_id = None
if spiff_task.parent: if spiff_task.parent:
parent_id = spiff_task.parent.id parent_id = spiff_task.parent.id
@ -316,9 +321,10 @@ class ProcessInstanceService:
multi_instance_type=mi_type, multi_instance_type=mi_type,
multi_instance_count=info["mi_count"], multi_instance_count=info["mi_count"],
multi_instance_index=info["mi_index"], multi_instance_index=info["mi_index"],
process_name=spiff_task.task_spec._wf_spec.description, process_identifier=spiff_task.task_spec._wf_spec.name,
properties=props, properties=props,
parent=parent_id, parent=parent_id,
call_activity_process_identifier=call_activity_process_identifier,
) )
return task return task

View File

@ -223,7 +223,7 @@ class ProcessModelService(FileSystemService):
user = UserService.current_user() user = UserService.current_user()
new_process_model_list = [] new_process_model_list = []
for process_model in process_models: for process_model in process_models:
uri = f"/v1.0/process-models/{process_model.id.replace('/', ':')}/process-instances" uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}"
result = AuthorizationService.user_has_permission( result = AuthorizationService.user_has_permission(
user=user, permission="create", target_uri=uri user=user, permission="create", target_uri=uri
) )

View File

@ -31,7 +31,6 @@ class ServiceTaskDelegate:
if value.startswith(secret_prefix): if value.startswith(secret_prefix):
key = value.removeprefix(secret_prefix) key = value.removeprefix(secret_prefix)
secret = SecretService().get_secret(key) secret = SecretService().get_secret(key)
assert secret # noqa: S101
return secret.value return secret.value
file_prefix = "file:" file_prefix = "file:"

View File

@ -171,13 +171,18 @@ class SpecFileService(FileSystemService):
ref.is_primary = True ref.is_primary = True
if ref.is_primary: if ref.is_primary:
ProcessModelService.update_process_model( update_hash = {}
process_model_info, if not process_model_info.primary_file_name:
{ update_hash["primary_process_id"] = ref.identifier
"primary_process_id": ref.identifier, update_hash["primary_file_name"] = file_name
"primary_file_name": file_name, elif file_name == process_model_info.primary_file_name:
}, update_hash["primary_process_id"] = ref.identifier
)
if len(update_hash) > 0:
ProcessModelService.update_process_model(
process_model_info,
update_hash,
)
SpecFileService.update_caches(ref) SpecFileService.update_caches(ref)
return file return file

View File

@ -1167,6 +1167,60 @@ class TestProcessApi(BaseTest):
xml_file_contents = f_open.read() xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
def test_process_instance_show_with_specified_process_identifier(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_show_with_specified_process_identifier."""
process_model_id = "call_activity_nested"
process_model_identifier = self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="test_group_two",
process_model_id=process_model_id,
bpmn_file_location="call_activity_nested",
)
spec_reference = SpecReferenceCache.query.filter_by(
identifier="Level2b"
).first()
assert spec_reference
modified_process_model_identifier = (
self.modify_process_identifier_for_path_param(process_model_identifier)
)
headers = self.logged_in_headers(with_super_admin_user)
create_response = self.create_process_instance_from_process_model_id(
client, process_model_identifier, headers
)
assert create_response.json is not None
assert create_response.status_code == 201
process_instance_id = create_response.json["id"]
client.post(
f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
show_response = client.get(
f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}?process_identifier={spec_reference.identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert show_response.json is not None
assert show_response.status_code == 200
file_system_root = FileSystemService.root_path()
process_instance_file_path = (
f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
)
with open(process_instance_file_path) as f_open:
xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] != xml_file_contents
spec_reference_file_path = os.path.join(
file_system_root, spec_reference.relative_path
)
with open(spec_reference_file_path) as f_open:
xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
def test_message_start_when_starting_process_instance( def test_message_start_when_starting_process_instance(
self, self,
app: Flask, app: Flask,
@ -2496,7 +2550,7 @@ class TestProcessApi(BaseTest):
f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}", f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}",
headers=self.logged_in_headers(with_super_admin_user), headers=self.logged_in_headers(with_super_admin_user),
) )
assert response.status_code == 201 assert response.status_code == 200
assert response.json["id"] == new_process_model_path assert response.json["id"] == new_process_model_path
# make sure the original model does not exist # make sure the original model does not exist
@ -2541,7 +2595,7 @@ class TestProcessApi(BaseTest):
f"/v1.0/process-groups/{modified_original_process_group_id}/move?new_location={new_location}", f"/v1.0/process-groups/{modified_original_process_group_id}/move?new_location={new_location}",
headers=self.logged_in_headers(with_super_admin_user), headers=self.logged_in_headers(with_super_admin_user),
) )
assert response.status_code == 201 assert response.status_code == 200
assert response.json["id"] == new_sub_path assert response.json["id"] == new_sub_path
# make sure the original subgroup does not exist # make sure the original subgroup does not exist

View File

@ -8,6 +8,9 @@
# testing # testing
/coverage /coverage
# in case we accidentally run backend tests in frontend. :D
/.coverage.*
# production # production
/build /build

View File

@ -1,4 +1,5 @@
import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; import { modifyProcessIdentifierForPathParam } from '../../src/helpers';
import { miscDisplayName } from '../support/helpers';
describe('process-models', () => { describe('process-models', () => {
beforeEach(() => { beforeEach(() => {
@ -16,7 +17,7 @@ describe('process-models', () => {
const modelDisplayName = `Test Model 2 ${id}`; const modelDisplayName = `Test Model 2 ${id}`;
const modelId = `test-model-2-${id}`; const modelId = `test-model-2-${id}`;
const newModelDisplayName = `${modelDisplayName} edited`; const newModelDisplayName = `${modelDisplayName} edited`;
cy.contains('99-Shared Resources').click(); cy.contains(miscDisplayName).click();
cy.wait(500); cy.wait(500);
cy.contains(groupDisplayName).click(); cy.contains(groupDisplayName).click();
cy.createModel(groupId, modelId, modelDisplayName); cy.createModel(groupId, modelId, modelDisplayName);
@ -34,7 +35,7 @@ describe('process-models', () => {
cy.contains(`Process Model: ${newModelDisplayName}`); cy.contains(`Process Model: ${newModelDisplayName}`);
// go back to process model show by clicking on the breadcrumb // go back to process model show by clicking on the breadcrumb
cy.contains(modelId).click(); cy.contains(modelDisplayName).click();
cy.getBySel('delete-process-model-button').click(); cy.getBySel('delete-process-model-button').click();
cy.contains('Are you sure'); cy.contains('Are you sure');
@ -46,6 +47,7 @@ describe('process-models', () => {
`process-groups/${modifyProcessIdentifierForPathParam(groupId)}` `process-groups/${modifyProcessIdentifierForPathParam(groupId)}`
); );
cy.contains(modelId).should('not.exist'); cy.contains(modelId).should('not.exist');
cy.contains(modelDisplayName).should('not.exist');
}); });
it('can create new bpmn, dmn, and json files', () => { it('can create new bpmn, dmn, and json files', () => {
@ -61,11 +63,11 @@ describe('process-models', () => {
const dmnFileName = `dmn_test_file_${id}`; const dmnFileName = `dmn_test_file_${id}`;
const jsonFileName = `json_test_file_${id}`; const jsonFileName = `json_test_file_${id}`;
cy.contains('99-Shared Resources').click(); cy.contains(miscDisplayName).click();
cy.wait(500); cy.wait(500);
cy.contains(groupDisplayName).click(); cy.contains(groupDisplayName).click();
cy.createModel(groupId, modelId, modelDisplayName); cy.createModel(groupId, modelId, modelDisplayName);
cy.contains(directParentGroupId).click(); cy.contains(groupDisplayName).click();
cy.contains(modelDisplayName).click(); cy.contains(modelDisplayName).click();
cy.url().should( cy.url().should(
'include', 'include',
@ -90,7 +92,7 @@ describe('process-models', () => {
cy.get('input[name=file_name]').type(bpmnFileName); cy.get('input[name=file_name]').type(bpmnFileName);
cy.contains('Save Changes').click(); cy.contains('Save Changes').click();
cy.contains(`Process Model File: ${bpmnFileName}`); cy.contains(`Process Model File: ${bpmnFileName}`);
cy.contains(modelId).click(); cy.contains(modelDisplayName).click();
cy.contains(`Process Model: ${modelDisplayName}`); cy.contains(`Process Model: ${modelDisplayName}`);
// cy.getBySel('files-accordion').click(); // cy.getBySel('files-accordion').click();
cy.contains(`${bpmnFileName}.bpmn`).should('exist'); cy.contains(`${bpmnFileName}.bpmn`).should('exist');
@ -108,7 +110,7 @@ describe('process-models', () => {
cy.get('input[name=file_name]').type(dmnFileName); cy.get('input[name=file_name]').type(dmnFileName);
cy.contains('Save Changes').click(); cy.contains('Save Changes').click();
cy.contains(`Process Model File: ${dmnFileName}`); cy.contains(`Process Model File: ${dmnFileName}`);
cy.contains(modelId).click(); cy.contains(modelDisplayName).click();
cy.contains(`Process Model: ${modelDisplayName}`); cy.contains(`Process Model: ${modelDisplayName}`);
// cy.getBySel('files-accordion').click(); // cy.getBySel('files-accordion').click();
cy.contains(`${dmnFileName}.dmn`).should('exist'); cy.contains(`${dmnFileName}.dmn`).should('exist');
@ -124,7 +126,7 @@ describe('process-models', () => {
cy.contains(`Process Model File: ${jsonFileName}`); cy.contains(`Process Model File: ${jsonFileName}`);
// wait for json to load before clicking away to avoid network errors // wait for json to load before clicking away to avoid network errors
cy.wait(500); cy.wait(500);
cy.contains(modelId).click(); cy.contains(modelDisplayName).click();
cy.contains(`Process Model: ${modelDisplayName}`); cy.contains(`Process Model: ${modelDisplayName}`);
// cy.getBySel('files-accordion').click(); // cy.getBySel('files-accordion').click();
cy.contains(`${jsonFileName}.json`).should('exist'); cy.contains(`${jsonFileName}.json`).should('exist');
@ -151,12 +153,12 @@ describe('process-models', () => {
const modelDisplayName = `Test Model 2 ${id}`; const modelDisplayName = `Test Model 2 ${id}`;
const modelId = `test-model-2-${id}`; const modelId = `test-model-2-${id}`;
cy.contains('Add a process group'); cy.contains('Add a process group');
cy.contains('99-Shared Resources').click(); cy.contains(miscDisplayName).click();
cy.wait(500); cy.wait(500);
cy.contains(groupDisplayName).click(); cy.contains(groupDisplayName).click();
cy.createModel(groupId, modelId, modelDisplayName); cy.createModel(groupId, modelId, modelDisplayName);
cy.contains(`${directParentGroupId}`).click(); cy.contains(`${groupDisplayName}`).click();
cy.contains('Add a process model'); cy.contains('Add a process model');
cy.contains(modelDisplayName).click(); cy.contains(modelDisplayName).click();
cy.url().should( cy.url().should(
@ -186,7 +188,7 @@ describe('process-models', () => {
.click(); .click();
// in breadcrumb // in breadcrumb
cy.contains(modelId).click(); cy.contains(modelDisplayName).click();
cy.getBySel('delete-process-model-button').click(); cy.getBySel('delete-process-model-button').click();
cy.contains('Are you sure'); cy.contains('Are you sure');
@ -203,7 +205,7 @@ describe('process-models', () => {
// process models no longer has pagination post-tiles // process models no longer has pagination post-tiles
// it.only('can paginate items', () => { // it.only('can paginate items', () => {
// cy.contains('99-Shared Resources').click(); // cy.contains(miscDisplayName).click();
// cy.wait(500); // cy.wait(500);
// cy.contains('Acceptance Tests Group One').click(); // cy.contains('Acceptance Tests Group One').click();
// cy.basicPaginationTest(); // cy.basicPaginationTest();

View File

@ -1,5 +1,6 @@
import { string } from 'prop-types'; import { string } from 'prop-types';
import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; import { modifyProcessIdentifierForPathParam } from '../../src/helpers';
import { miscDisplayName } from './helpers';
// *********************************************** // ***********************************************
// This example commands.js shows you how to // This example commands.js shows you how to
@ -86,15 +87,15 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => {
Cypress.Commands.add( Cypress.Commands.add(
'runPrimaryBpmnFile', 'runPrimaryBpmnFile',
(expectAutoRedirectToHumanTask = false) => { (expectAutoRedirectToHumanTask = false) => {
cy.contains('Run').click(); cy.contains('Start').click();
if (expectAutoRedirectToHumanTask) { if (expectAutoRedirectToHumanTask) {
// the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress. // the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress.
cy.url().should('include', `/tasks/`); cy.url().should('include', `/tasks/`);
cy.contains('Task: '); cy.contains('Task: ');
} else { } else {
cy.contains(/Process Instance.*kicked off/); cy.contains(/Process Instance.*[kK]icked [oO]ff/);
cy.reload(true); cy.reload(true);
cy.contains(/Process Instance.*kicked off/).should('not.exist'); cy.contains(/Process Instance.*[kK]icked [oO]ff/).should('not.exist');
} }
} }
); );
@ -103,8 +104,8 @@ Cypress.Commands.add(
'navigateToProcessModel', 'navigateToProcessModel',
(groupDisplayName, modelDisplayName, modelIdentifier) => { (groupDisplayName, modelDisplayName, modelIdentifier) => {
cy.navigateToAdmin(); cy.navigateToAdmin();
cy.contains('99-Shared Resources').click(); cy.contains(miscDisplayName).click();
cy.contains(`Process Group: 99-Shared Resources`, { timeout: 10000 }); cy.contains(`Process Group: ${miscDisplayName}`, { timeout: 10000 });
cy.contains(groupDisplayName).click(); cy.contains(groupDisplayName).click();
cy.contains(`Process Group: ${groupDisplayName}`); cy.contains(`Process Group: ${groupDisplayName}`);
// https://stackoverflow.com/q/51254946/6090676 // https://stackoverflow.com/q/51254946/6090676

View File

@ -0,0 +1 @@
export const miscDisplayName = 'Shared Resources';

View File

@ -68,7 +68,7 @@
"@cypress/grep": "^3.1.0", "@cypress/grep": "^3.1.0",
"@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.6", "@typescript-eslint/parser": "^5.30.6",
"cypress": "^10.8.0", "cypress": "^12",
"eslint": "^8.19.0", "eslint": "^8.19.0",
"eslint_d": "^12.2.0", "eslint_d": "^12.2.0",
"eslint-config-airbnb": "^19.0.4", "eslint-config-airbnb": "^19.0.4",
@ -9850,9 +9850,9 @@
"integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==" "integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A=="
}, },
"node_modules/cypress": { "node_modules/cypress": {
"version": "10.11.0", "version": "12.1.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz", "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
"integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==", "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
"dev": true, "dev": true,
"hasInstallScript": true, "hasInstallScript": true,
"dependencies": { "dependencies": {
@ -9903,7 +9903,7 @@
"cypress": "bin/cypress" "cypress": "bin/cypress"
}, },
"engines": { "engines": {
"node": ">=12.0.0" "node": "^14.0.0 || ^16.0.0 || >=18.0.0"
} }
}, },
"node_modules/cypress/node_modules/@types/node": { "node_modules/cypress/node_modules/@types/node": {
@ -38586,9 +38586,9 @@
"integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==" "integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A=="
}, },
"cypress": { "cypress": {
"version": "10.11.0", "version": "12.1.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz", "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
"integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==", "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
"dev": true, "dev": true,
"requires": { "requires": {
"@cypress/request": "^2.88.10", "@cypress/request": "^2.88.10",

View File

@ -104,7 +104,7 @@
"@cypress/grep": "^3.1.0", "@cypress/grep": "^3.1.0",
"@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.6", "@typescript-eslint/parser": "^5.30.6",
"cypress": "^10.8.0", "cypress": "^12",
"eslint": "^8.19.0", "eslint": "^8.19.0",
"eslint_d": "^12.2.0", "eslint_d": "^12.2.0",
"eslint-config-airbnb": "^19.0.4", "eslint-config-airbnb": "^19.0.4",

View File

@ -13,6 +13,7 @@ import AdminRoutes from './routes/AdminRoutes';
import { ErrorForDisplay } from './interfaces'; import { ErrorForDisplay } from './interfaces';
import { AbilityContext } from './contexts/Can'; import { AbilityContext } from './contexts/Can';
import UserService from './services/UserService';
export default function App() { export default function App() {
const [errorMessage, setErrorMessage] = useState<ErrorForDisplay | null>( const [errorMessage, setErrorMessage] = useState<ErrorForDisplay | null>(
@ -24,6 +25,11 @@ export default function App() {
[errorMessage] [errorMessage]
); );
if (!UserService.isLoggedIn()) {
UserService.doLogin();
return null;
}
const ability = defineAbility(() => {}); const ability = defineAbility(() => {});
let errorTag = null; let errorTag = null;

View File

@ -24,6 +24,7 @@ import UserService from '../services/UserService';
import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { PermissionsToCheck } from '../interfaces'; import { PermissionsToCheck } from '../interfaces';
import { usePermissionFetcher } from '../hooks/PermissionService'; import { usePermissionFetcher } from '../hooks/PermissionService';
import { UnauthenticatedError } from '../services/HttpService';
// for ref: https://react-bootstrap.github.io/components/navbar/ // for ref: https://react-bootstrap.github.io/components/navbar/
export default function NavigationBar() { export default function NavigationBar() {
@ -39,6 +40,11 @@ export default function NavigationBar() {
const [activeKey, setActiveKey] = useState<string>(''); const [activeKey, setActiveKey] = useState<string>('');
const { targetUris } = useUriListForPermissions(); const { targetUris } = useUriListForPermissions();
// App.jsx forces login (which redirects to keycloak) so we should never get here if we're not logged in.
if (!UserService.isLoggedIn()) {
throw new UnauthenticatedError('You must be authenticated to do this.');
}
const permissionRequestData: PermissionsToCheck = { const permissionRequestData: PermissionsToCheck = {
[targetUris.authenticationListPath]: ['GET'], [targetUris.authenticationListPath]: ['GET'],
[targetUris.messageInstanceListPath]: ['GET'], [targetUris.messageInstanceListPath]: ['GET'],
@ -135,6 +141,9 @@ export default function NavigationBar() {
}; };
const headerMenuItems = () => { const headerMenuItems = () => {
if (!UserService.isLoggedIn()) {
return null;
}
return ( return (
<> <>
<HeaderMenuItem href="/" isCurrentPage={isActivePage('/')}> <HeaderMenuItem href="/" isCurrentPage={isActivePage('/')}>

View File

@ -306,8 +306,13 @@ export default function ProcessInstanceListTable({
checkFiltersAndRun(); checkFiltersAndRun();
if (autoReload) { if (autoReload) {
refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, checkFiltersAndRun); return refreshAtInterval(
REFRESH_INTERVAL,
REFRESH_TIMEOUT,
checkFiltersAndRun
);
} }
return undefined;
}, [ }, [
autoReload, autoReload,
searchParams, searchParams,
@ -845,8 +850,8 @@ export default function ProcessInstanceListTable({
return null; return null;
}} }}
shouldFilterItem={shouldFilterReportColumn} shouldFilterItem={shouldFilterReportColumn}
placeholder="Choose a report column" placeholder="Choose a column to show"
titleText="Report Column" titleText="Column"
/> />
); );
} }
@ -895,7 +900,7 @@ export default function ProcessInstanceListTable({
kind="ghost" kind="ghost"
size="sm" size="sm"
className={`button-tag-icon ${tagTypeClass}`} className={`button-tag-icon ${tagTypeClass}`}
title={`Edit ${reportColumnForEditing.accessor}`} title={`Edit ${reportColumnForEditing.accessor} column`}
onClick={() => { onClick={() => {
setReportColumnToOperateOn(reportColumnForEditing); setReportColumnToOperateOn(reportColumnForEditing);
setShowReportColumnForm(true); setShowReportColumnForm(true);
@ -923,7 +928,7 @@ export default function ProcessInstanceListTable({
<Button <Button
data-qa="add-column-button" data-qa="add-column-button"
renderIcon={AddAlt} renderIcon={AddAlt}
iconDescription="Filter Options" iconDescription="Column options"
className="with-tiny-top-margin" className="with-tiny-top-margin"
kind="ghost" kind="ghost"
hasIconOnly hasIconOnly

View File

@ -52,22 +52,25 @@ import TouchModule from 'diagram-js/lib/navigation/touch';
// @ts-expect-error TS(7016) FIXME // @ts-expect-error TS(7016) FIXME
import ZoomScrollModule from 'diagram-js/lib/navigation/zoomscroll'; import ZoomScrollModule from 'diagram-js/lib/navigation/zoomscroll';
import { useNavigate } from 'react-router-dom';
import { Can } from '@casl/react'; import { Can } from '@casl/react';
import HttpService from '../services/HttpService'; import HttpService from '../services/HttpService';
import ButtonWithConfirmation from './ButtonWithConfirmation'; import ButtonWithConfirmation from './ButtonWithConfirmation';
import { makeid } from '../helpers'; import { getBpmnProcessIdentifiers, makeid } from '../helpers';
import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { PermissionsToCheck } from '../interfaces'; import { PermissionsToCheck, ProcessInstanceTask } from '../interfaces';
import { usePermissionFetcher } from '../hooks/PermissionService'; import { usePermissionFetcher } from '../hooks/PermissionService';
type OwnProps = { type OwnProps = {
processModelId: string; processModelId: string;
diagramType: string; diagramType: string;
readyOrWaitingBpmnTaskIds?: string[] | null; readyOrWaitingProcessInstanceTasks?: ProcessInstanceTask[] | null;
completedTasksBpmnIds?: string[] | null; completedProcessInstanceTasks?: ProcessInstanceTask[] | null;
saveDiagram?: (..._args: any[]) => any; saveDiagram?: (..._args: any[]) => any;
onDeleteFile?: (..._args: any[]) => any; onDeleteFile?: (..._args: any[]) => any;
isPrimaryFile?: boolean;
onSetPrimaryFile?: (..._args: any[]) => any; onSetPrimaryFile?: (..._args: any[]) => any;
diagramXML?: string | null; diagramXML?: string | null;
fileName?: string; fileName?: string;
@ -88,10 +91,11 @@ type OwnProps = {
export default function ReactDiagramEditor({ export default function ReactDiagramEditor({
processModelId, processModelId,
diagramType, diagramType,
readyOrWaitingBpmnTaskIds, readyOrWaitingProcessInstanceTasks,
completedTasksBpmnIds, completedProcessInstanceTasks,
saveDiagram, saveDiagram,
onDeleteFile, onDeleteFile,
isPrimaryFile,
onSetPrimaryFile, onSetPrimaryFile,
diagramXML, diagramXML,
fileName, fileName,
@ -119,6 +123,7 @@ export default function ReactDiagramEditor({
[targetUris.processModelFileShowPath]: ['POST', 'GET', 'PUT', 'DELETE'], [targetUris.processModelFileShowPath]: ['POST', 'GET', 'PUT', 'DELETE'],
}; };
const { ability } = usePermissionFetcher(permissionRequestData); const { ability } = usePermissionFetcher(permissionRequestData);
const navigate = useNavigate();
useEffect(() => { useEffect(() => {
if (diagramModelerState) { if (diagramModelerState) {
@ -227,7 +232,11 @@ export default function ReactDiagramEditor({
function handleElementClick(event: any) { function handleElementClick(event: any) {
if (onElementClick) { if (onElementClick) {
onElementClick(event.element); const canvas = diagramModeler.get('canvas');
const bpmnProcessIdentifiers = getBpmnProcessIdentifiers(
canvas.getRootElement()
);
onElementClick(event.element, bpmnProcessIdentifiers);
} }
} }
@ -350,12 +359,19 @@ export default function ReactDiagramEditor({
function highlightBpmnIoElement( function highlightBpmnIoElement(
canvas: any, canvas: any,
taskBpmnId: string, processInstanceTask: ProcessInstanceTask,
bpmnIoClassName: string bpmnIoClassName: string,
bpmnProcessIdentifiers: string[]
) { ) {
if (checkTaskCanBeHighlighted(taskBpmnId)) { if (checkTaskCanBeHighlighted(processInstanceTask.name)) {
try { try {
canvas.addMarker(taskBpmnId, bpmnIoClassName); if (
bpmnProcessIdentifiers.includes(
processInstanceTask.process_identifier
)
) {
canvas.addMarker(processInstanceTask.name, bpmnIoClassName);
}
} catch (bpmnIoError: any) { } catch (bpmnIoError: any) {
// the task list also contains task for processes called from call activities which will // the task list also contains task for processes called from call activities which will
// not exist in this diagram so just ignore them for now. // not exist in this diagram so just ignore them for now.
@ -394,21 +410,29 @@ export default function ReactDiagramEditor({
// highlighting a field // highlighting a field
// Option 3 at: // Option 3 at:
// https://github.com/bpmn-io/bpmn-js-examples/tree/master/colors // https://github.com/bpmn-io/bpmn-js-examples/tree/master/colors
if (readyOrWaitingBpmnTaskIds) { if (readyOrWaitingProcessInstanceTasks) {
readyOrWaitingBpmnTaskIds.forEach((readyOrWaitingBpmnTaskId) => { const bpmnProcessIdentifiers = getBpmnProcessIdentifiers(
canvas.getRootElement()
);
readyOrWaitingProcessInstanceTasks.forEach((readyOrWaitingBpmnTask) => {
highlightBpmnIoElement( highlightBpmnIoElement(
canvas, canvas,
readyOrWaitingBpmnTaskId, readyOrWaitingBpmnTask,
'active-task-highlight' 'active-task-highlight',
bpmnProcessIdentifiers
); );
}); });
} }
if (completedTasksBpmnIds) { if (completedProcessInstanceTasks) {
completedTasksBpmnIds.forEach((completedTaskBpmnId) => { const bpmnProcessIdentifiers = getBpmnProcessIdentifiers(
canvas.getRootElement()
);
completedProcessInstanceTasks.forEach((completedTask) => {
highlightBpmnIoElement( highlightBpmnIoElement(
canvas, canvas,
completedTaskBpmnId, completedTask,
'completed-task-highlight' 'completed-task-highlight',
bpmnProcessIdentifiers
); );
}); });
} }
@ -484,8 +508,8 @@ export default function ReactDiagramEditor({
diagramType, diagramType,
diagramXML, diagramXML,
diagramXMLString, diagramXMLString,
readyOrWaitingBpmnTaskIds, readyOrWaitingProcessInstanceTasks,
completedTasksBpmnIds, completedProcessInstanceTasks,
fileName, fileName,
performingXmlUpdates, performingXmlUpdates,
processModelId, processModelId,
@ -533,6 +557,8 @@ export default function ReactDiagramEditor({
}); });
}; };
const canViewXml = fileName !== undefined;
const userActionOptions = () => { const userActionOptions = () => {
if (diagramType !== 'readonly') { if (diagramType !== 'readonly') {
return ( return (
@ -549,7 +575,7 @@ export default function ReactDiagramEditor({
a={targetUris.processModelFileShowPath} a={targetUris.processModelFileShowPath}
ability={ability} ability={ability}
> >
{fileName && ( {fileName && !isPrimaryFile && (
<ButtonWithConfirmation <ButtonWithConfirmation
description={`Delete file ${fileName}?`} description={`Delete file ${fileName}?`}
onConfirmation={handleDelete} onConfirmation={handleDelete}
@ -571,6 +597,23 @@ export default function ReactDiagramEditor({
> >
<Button onClick={downloadXmlFile}>Download</Button> <Button onClick={downloadXmlFile}>Download</Button>
</Can> </Can>
<Can
I="GET"
a={targetUris.processModelFileShowPath}
ability={ability}
>
{canViewXml && (
<Button
onClick={() => {
navigate(
`/admin/process-models/${processModelId}/form/${fileName}`
);
}}
>
View XML
</Button>
)}
</Can>
</> </>
); );
} }
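Taken together, the ReactDiagramEditor changes above switch highlighting from bare BPMN task ids to full task objects, so a marker is only added when the task belongs to a process that is actually drawn in the current diagram. A minimal sketch of that matching rule, pulled out of the component for clarity; the trimmed ProcessInstanceTask shape, the helper name, and the inline canvas type are illustrative stand-ins, not the project's real definitions:

// Sketch only (not part of the diff): mark a task element only when its owning
// process identifier is one of the processes rendered in this diagram.
interface ProcessInstanceTask {
  name: string;                 // BPMN element id of the task
  process_identifier: string;   // id of the BPMN process the task belongs to
}

function highlightTask(
  canvas: { addMarker: (elementId: string, className: string) => void },
  task: ProcessInstanceTask,
  className: string,
  bpmnProcessIdentifiers: string[]
) {
  // tasks coming from call activities reference processes that are not present
  // in this diagram, so skip them instead of letting addMarker throw
  if (!bpmnProcessIdentifiers.includes(task.process_identifier)) {
    return;
  }
  try {
    canvas.addMarker(task.name, className);
  } catch (error) {
    // element not found in this diagram; ignore, as the component does
  }
}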

View File

@ -208,5 +208,29 @@ export const refreshAtInterval = (
() => clearInterval(intervalRef), () => clearInterval(intervalRef),
timeout * 1000 timeout * 1000
); );
return [intervalRef, timeoutRef]; return () => {
clearInterval(intervalRef);
clearTimeout(timeoutRef);
};
};
const getChildProcesses = (bpmnElement: any) => {
let elements: string[] = [];
bpmnElement.children.forEach((c: any) => {
if (c.type === 'bpmn:Participant') {
if (c.businessObject.processRef) {
elements.push(c.businessObject.processRef.id);
}
elements = [...elements, ...getChildProcesses(c)];
} else if (c.type === 'bpmn:SubProcess') {
elements.push(c.id);
}
});
return elements;
};
export const getBpmnProcessIdentifiers = (rootBpmnElement: any) => {
const childProcesses = getChildProcesses(rootBpmnElement);
childProcesses.push(rootBpmnElement.businessObject.id);
return childProcesses;
}; };
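The helpers change above gives refreshAtInterval a single cleanup function as its return value instead of a pair of timer ids, which is exactly the shape a React useEffect wants back, and is what the ProcessInstanceListTable hunk earlier relies on. A small sketch of how a caller consumes it; the hook name and the 30/600 values are made up for illustration, while the import path and call signature follow the diff:

// Sketch only: pairing the new refreshAtInterval return value with useEffect.
import { useEffect } from 'react';
import { refreshAtInterval } from '../helpers';

function useAutoRefresh(autoReload: boolean, refresh: () => void) {
  useEffect(() => {
    refresh();
    if (autoReload) {
      // refreshAtInterval now returns () => { clearInterval(...); clearTimeout(...); },
      // so handing it back lets React stop both timers when the component unmounts
      return refreshAtInterval(30, 600, refresh);
    }
    return undefined;
  }, [autoReload, refresh]);
}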

View File

@ -14,7 +14,8 @@ export const useUriListForPermissions = () => {
processInstanceListPath: '/v1.0/process-instances', processInstanceListPath: '/v1.0/process-instances',
processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`,
processInstanceReportListPath: '/v1.0/process-instances/reports', processInstanceReportListPath: '/v1.0/process-instances/reports',
processInstanceTaskListPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`,
processInstanceTaskListDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`,
processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`,
processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`,
processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`, processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`,

View File

@ -12,13 +12,16 @@ export interface RecentProcessModel {
} }
export interface ProcessInstanceTask { export interface ProcessInstanceTask {
id: number; id: string;
process_model_display_name: string; process_model_display_name: string;
process_model_identifier: string; process_model_identifier: string;
task_title: string; task_title: string;
lane_assignment_id: string; lane_assignment_id: string;
process_instance_status: number; process_instance_status: number;
updated_at_in_seconds: number; updated_at_in_seconds: number;
state: string;
process_identifier: string;
name: string;
} }
export interface ProcessReference { export interface ProcessReference {
@ -49,6 +52,7 @@ export interface ProcessInstance {
id: number; id: number;
process_model_identifier: string; process_model_identifier: string;
process_model_display_name: string; process_model_display_name: string;
spiff_step?: number;
} }
export interface MessageCorrelationProperties { export interface MessageCorrelationProperties {

View File

@ -21,10 +21,11 @@ export default function ProcessInstanceList() {
<ProcessBreadcrumb <ProcessBreadcrumb
hotCrumbs={[ hotCrumbs={[
['Process Groups', '/admin'], ['Process Groups', '/admin'],
[ {
`Process Model: ${processModelFullIdentifier}`, entityToExplode: processModelFullIdentifier,
`process_model:${processModelFullIdentifier}:link`, entityType: 'process-model-id',
], linkLastItem: true,
},
['Process Instances'], ['Process Instances'],
]} ]}
/> />

View File

@ -1,6 +1,11 @@
import { useContext, useEffect, useState } from 'react'; import { useContext, useEffect, useState } from 'react';
import Editor from '@monaco-editor/react'; import Editor from '@monaco-editor/react';
import { useParams, useNavigate, Link } from 'react-router-dom'; import {
useParams,
useNavigate,
Link,
useSearchParams,
} from 'react-router-dom';
import { import {
TrashCan, TrashCan,
StopOutline, StopOutline,
@ -34,15 +39,21 @@ import {
import ButtonWithConfirmation from '../components/ButtonWithConfirmation'; import ButtonWithConfirmation from '../components/ButtonWithConfirmation';
import ErrorContext from '../contexts/ErrorContext'; import ErrorContext from '../contexts/ErrorContext';
import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { PermissionsToCheck } from '../interfaces'; import {
PermissionsToCheck,
ProcessInstance,
ProcessInstanceTask,
} from '../interfaces';
import { usePermissionFetcher } from '../hooks/PermissionService'; import { usePermissionFetcher } from '../hooks/PermissionService';
export default function ProcessInstanceShow() { export default function ProcessInstanceShow() {
const navigate = useNavigate(); const navigate = useNavigate();
const params = useParams(); const params = useParams();
const [searchParams] = useSearchParams();
const [processInstance, setProcessInstance] = useState(null); const [processInstance, setProcessInstance] =
const [tasks, setTasks] = useState<Array<object> | null>(null); useState<ProcessInstance | null>(null);
const [tasks, setTasks] = useState<ProcessInstanceTask[] | null>(null);
const [tasksCallHadError, setTasksCallHadError] = useState<boolean>(false); const [tasksCallHadError, setTasksCallHadError] = useState<boolean>(false);
const [taskToDisplay, setTaskToDisplay] = useState<object | null>(null); const [taskToDisplay, setTaskToDisplay] = useState<object | null>(null);
const [taskDataToDisplay, setTaskDataToDisplay] = useState<string>(''); const [taskDataToDisplay, setTaskDataToDisplay] = useState<string>('');
@ -59,8 +70,10 @@ export default function ProcessInstanceShow() {
const permissionRequestData: PermissionsToCheck = { const permissionRequestData: PermissionsToCheck = {
[targetUris.messageInstanceListPath]: ['GET'], [targetUris.messageInstanceListPath]: ['GET'],
[targetUris.processInstanceTaskListPath]: ['GET'], [targetUris.processInstanceTaskListPath]: ['GET'],
[targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'],
[targetUris.processInstanceActionPath]: ['DELETE'], [targetUris.processInstanceActionPath]: ['DELETE'],
[targetUris.processInstanceLogListPath]: ['GET'], [targetUris.processInstanceLogListPath]: ['GET'],
[targetUris.processModelShowPath]: ['PUT'],
[`${targetUris.processInstanceActionPath}/suspend`]: ['PUT'], [`${targetUris.processInstanceActionPath}/suspend`]: ['PUT'],
[`${targetUris.processInstanceActionPath}/terminate`]: ['PUT'], [`${targetUris.processInstanceActionPath}/terminate`]: ['PUT'],
[`${targetUris.processInstanceActionPath}/resume`]: ['PUT'], [`${targetUris.processInstanceActionPath}/resume`]: ['PUT'],
@ -80,17 +93,28 @@ export default function ProcessInstanceShow() {
const processTaskFailure = () => { const processTaskFailure = () => {
setTasksCallHadError(true); setTasksCallHadError(true);
}; };
let queryParams = '';
const processIdentifier = searchParams.get('process_identifier');
if (processIdentifier) {
queryParams = `?process_identifier=${processIdentifier}`;
}
HttpService.makeCallToBackend({ HttpService.makeCallToBackend({
path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}`, path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`,
successCallback: setProcessInstance, successCallback: setProcessInstance,
}); });
let taskParams = '?all_tasks=true'; let taskParams = '?all_tasks=true';
if (typeof params.spiff_step !== 'undefined') { if (typeof params.spiff_step !== 'undefined') {
taskParams = `${taskParams}&spiff_step=${params.spiff_step}`; taskParams = `${taskParams}&spiff_step=${params.spiff_step}`;
} }
if (ability.can('GET', targetUris.processInstanceTaskListPath)) { let taskPath = '';
if (ability.can('GET', targetUris.processInstanceTaskListDataPath)) {
taskPath = `${targetUris.processInstanceTaskListDataPath}${taskParams}`;
} else if (ability.can('GET', targetUris.processInstanceTaskListPath)) {
taskPath = `${targetUris.processInstanceTaskListPath}${taskParams}`;
}
if (taskPath) {
HttpService.makeCallToBackend({ HttpService.makeCallToBackend({
path: `${targetUris.processInstanceTaskListPath}${taskParams}`, path: taskPath,
successCallback: setTasks, successCallback: setTasks,
failureCallback: processTaskFailure, failureCallback: processTaskFailure,
}); });
@ -98,7 +122,14 @@ export default function ProcessInstanceShow() {
setTasksCallHadError(true); setTasksCallHadError(true);
} }
} }
}, [params, modifiedProcessModelId, permissionsLoaded, ability, targetUris]); }, [
params,
modifiedProcessModelId,
permissionsLoaded,
ability,
targetUris,
searchParams,
]);
const deleteProcessInstance = () => { const deleteProcessInstance = () => {
HttpService.makeCallToBackend({ HttpService.makeCallToBackend({
@ -140,12 +171,12 @@ export default function ProcessInstanceShow() {
const getTaskIds = () => { const getTaskIds = () => {
const taskIds = { completed: [], readyOrWaiting: [] }; const taskIds = { completed: [], readyOrWaiting: [] };
if (tasks) { if (tasks) {
tasks.forEach(function getUserTasksElement(task: any) { tasks.forEach(function getUserTasksElement(task: ProcessInstanceTask) {
if (task.state === 'COMPLETED') { if (task.state === 'COMPLETED') {
(taskIds.completed as any).push(task.name); (taskIds.completed as any).push(task);
} }
if (task.state === 'READY' || task.state === 'WAITING') { if (task.state === 'READY' || task.state === 'WAITING') {
(taskIds.readyOrWaiting as any).push(task.name); (taskIds.readyOrWaiting as any).push(task);
} }
}); });
} }
@ -175,15 +206,18 @@ export default function ProcessInstanceShow() {
label: any, label: any,
distance: number distance: number
) => { ) => {
const processIdentifier = searchParams.get('process_identifier');
let queryParams = '';
if (processIdentifier) {
queryParams = `?process_identifier=${processIdentifier}`;
}
return ( return (
<Link <Link
reloadDocument reloadDocument
data-qa="process-instance-step-link" data-qa="process-instance-step-link"
to={`/admin/process-instances/${ to={`/admin/process-instances/${params.process_model_id}/${
params.process_model_id params.process_instance_id
}/process-instances/${params.process_instance_id}/${ }/${currentSpiffStep(processInstanceToUse) + distance}${queryParams}`}
currentSpiffStep(processInstanceToUse) + distance
}`}
> >
{label} {label}
</Link> </Link>
@ -364,10 +398,15 @@ export default function ProcessInstanceShow() {
} }
}; };
const handleClickedDiagramTask = (shapeElement: any) => { const handleClickedDiagramTask = (
shapeElement: any,
bpmnProcessIdentifiers: any
) => {
if (tasks) { if (tasks) {
const matchingTask: any = tasks.find( const matchingTask: any = tasks.find(
(task: any) => task.name === shapeElement.id (task: any) =>
task.name === shapeElement.id &&
bpmnProcessIdentifiers.includes(task.process_identifier)
); );
if (matchingTask) { if (matchingTask) {
setTaskToDisplay(matchingTask); setTaskToDisplay(matchingTask);
@ -411,7 +450,9 @@ export default function ProcessInstanceShow() {
const canEditTaskData = (task: any) => { const canEditTaskData = (task: any) => {
return ( return (
task.state === 'READY' && showingLastSpiffStep(processInstance as any) ability.can('PUT', targetUris.processInstanceTaskListDataPath) &&
task.state === 'READY' &&
showingLastSpiffStep(processInstance as any)
); );
}; };
@ -460,7 +501,10 @@ export default function ProcessInstanceShow() {
const taskDataButtons = (task: any) => { const taskDataButtons = (task: any) => {
const buttons = []; const buttons = [];
if (task.type === 'Script Task') { if (
task.type === 'Script Task' &&
ability.can('PUT', targetUris.processModelShowPath)
) {
buttons.push( buttons.push(
<Button <Button
data-qa="create-script-unit-test-button" data-qa="create-script-unit-test-button"
@ -471,19 +515,28 @@ export default function ProcessInstanceShow() {
); );
} }
if (task.type === 'Call Activity') {
buttons.push(
<Link
data-qa="go-to-call-activity-result"
to={`${window.location.pathname}?process_identifier=${task.call_activity_process_identifier}`}
target="_blank"
>
View Call Activity Diagram
</Link>
);
}
if (canEditTaskData(task)) { if (canEditTaskData(task)) {
if (editingTaskData) { if (editingTaskData) {
buttons.push( buttons.push(
<Button <Button data-qa="save-task-data-button" onClick={saveTaskData}>
data-qa="create-script-unit-test-button"
onClick={saveTaskData}
>
Save Save
</Button> </Button>
); );
buttons.push( buttons.push(
<Button <Button
data-qa="create-script-unit-test-button" data-qa="cancel-task-data-edit-button"
onClick={cancelEditingTaskData} onClick={cancelEditingTaskData}
> >
Cancel Cancel
@ -492,7 +545,7 @@ export default function ProcessInstanceShow() {
} else { } else {
buttons.push( buttons.push(
<Button <Button
data-qa="create-script-unit-test-button" data-qa="edit-task-data-button"
onClick={() => setEditingTaskData(true)} onClick={() => setEditingTaskData(true)}
> >
Edit Edit
@ -622,8 +675,8 @@ export default function ProcessInstanceShow() {
processModelId={processModelId || ''} processModelId={processModelId || ''}
diagramXML={processInstanceToUse.bpmn_xml_file_contents || ''} diagramXML={processInstanceToUse.bpmn_xml_file_contents || ''}
fileName={processInstanceToUse.bpmn_xml_file_contents || ''} fileName={processInstanceToUse.bpmn_xml_file_contents || ''}
readyOrWaitingBpmnTaskIds={taskIds.readyOrWaiting} readyOrWaitingProcessInstanceTasks={taskIds.readyOrWaiting}
completedTasksBpmnIds={taskIds.completed} completedProcessInstanceTasks={taskIds.completed}
diagramType="readonly" diagramType="readonly"
onElementClick={handleClickedDiagramTask} onElementClick={handleClickedDiagramTask}
/> />
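The net effect of the ProcessInstanceShow changes is that a call activity task can deep-link back into the same page with a process_identifier query parameter, and the page threads that parameter through to the backend so the child process's diagram is returned instead of the top-level one. A stripped-down sketch of that wiring; the HttpService call shape and path mirror the hunks above, while the wrapper function itself is illustrative glue rather than the component's real code:

// Sketch only: the process_identifier round trip.
import HttpService from '../services/HttpService';

const fetchProcessInstance = (
  modifiedProcessModelId: string,
  processInstanceId: string,
  processIdentifier: string | null,
  setProcessInstance: (result: any) => void
) => {
  // when a call activity's child process is requested, pass its identifier along
  // so the backend returns that child's BPMN XML instead of the top-level diagram
  const queryParams = processIdentifier
    ? `?process_identifier=${processIdentifier}`
    : '';
  HttpService.makeCallToBackend({
    path: `/process-instances/${modifiedProcessModelId}/${processInstanceId}${queryParams}`,
    successCallback: setProcessInstance,
  });
};

// a Call Activity task then deep-links back into the same page for its child process:
// `${window.location.pathname}?process_identifier=${task.call_activity_process_identifier}`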

View File

@ -25,6 +25,7 @@ import {
ProcessReference, ProcessReference,
} from '../interfaces'; } from '../interfaces';
import ProcessSearch from '../components/ProcessSearch'; import ProcessSearch from '../components/ProcessSearch';
import { Notification } from '../components/Notification';
export default function ProcessModelEditDiagram() { export default function ProcessModelEditDiagram() {
const [showFileNameEditor, setShowFileNameEditor] = useState(false); const [showFileNameEditor, setShowFileNameEditor] = useState(false);
@ -157,6 +158,8 @@ export default function ProcessModelEditDiagram() {
} }
}; };
const [displaySaveFileMessage, setDisplaySaveFileMessage] =
useState<boolean>(false);
const saveDiagram = (bpmnXML: any, fileName = params.file_name) => { const saveDiagram = (bpmnXML: any, fileName = params.file_name) => {
setErrorMessage(null); setErrorMessage(null);
setBpmnXmlForDiagramRendering(bpmnXML); setBpmnXmlForDiagramRendering(bpmnXML);
@ -192,6 +195,7 @@ export default function ProcessModelEditDiagram() {
// after saving the file, make sure we null out newFileName // after saving the file, make sure we null out newFileName
// so it does not get used over the params // so it does not get used over the params
setNewFileName(''); setNewFileName('');
setDisplaySaveFileMessage(true);
}; };
const onDeleteFile = (fileName = params.file_name) => { const onDeleteFile = (fileName = params.file_name) => {
@ -819,6 +823,7 @@ export default function ProcessModelEditDiagram() {
processModelId={params.process_model_id || ''} processModelId={params.process_model_id || ''}
saveDiagram={saveDiagram} saveDiagram={saveDiagram}
onDeleteFile={onDeleteFile} onDeleteFile={onDeleteFile}
isPrimaryFile={params.file_name === processModel?.primary_file_name}
onSetPrimaryFile={onSetPrimaryFileCallback} onSetPrimaryFile={onSetPrimaryFileCallback}
diagramXML={bpmnXmlForDiagramRendering} diagramXML={bpmnXmlForDiagramRendering}
fileName={params.file_name} fileName={params.file_name}
@ -836,6 +841,20 @@ export default function ProcessModelEditDiagram() {
); );
}; };
const saveFileMessage = () => {
if (displaySaveFileMessage) {
return (
<Notification
title="File Saved: "
onClose={() => setDisplaySaveFileMessage(false)}
>
Changes to the file were saved.
</Notification>
);
}
return null;
};
// if a file name is not given then this is a new model and the ReactDiagramEditor component will handle it // if a file name is not given then this is a new model and the ReactDiagramEditor component will handle it
if ((bpmnXmlForDiagramRendering || !params.file_name) && processModel) { if ((bpmnXmlForDiagramRendering || !params.file_name) && processModel) {
const processModelFileName = processModelFile ? processModelFile.name : ''; const processModelFileName = processModelFile ? processModelFile.name : '';
@ -856,6 +875,7 @@ export default function ProcessModelEditDiagram() {
Process Model File{processModelFile ? ': ' : ''} Process Model File{processModelFile ? ': ' : ''}
{processModelFileName} {processModelFileName}
</h1> </h1>
{saveFileMessage()}
{appropriateEditor()} {appropriateEditor()}
{newFileNameBox()} {newFileNameBox()}
{scriptEditor()} {scriptEditor()}

View File

@ -264,25 +264,27 @@ export default function ProcessModelShow() {
</Can> </Can>
); );
elements.push( if (!isPrimaryBpmnFile) {
<Can elements.push(
I="DELETE" <Can
a={targetUris.processModelFileCreatePath} I="DELETE"
ability={ability} a={targetUris.processModelFileCreatePath}
> ability={ability}
<ButtonWithConfirmation >
kind="ghost" <ButtonWithConfirmation
renderIcon={TrashCan} kind="ghost"
iconDescription="Delete File" renderIcon={TrashCan}
hasIconOnly iconDescription="Delete File"
description={`Delete file: ${processModelFile.name}`} hasIconOnly
onConfirmation={() => { description={`Delete file: ${processModelFile.name}`}
onDeleteFile(processModelFile.name); onConfirmation={() => {
}} onDeleteFile(processModelFile.name);
confirmButtonLabel="Delete" }}
/> confirmButtonLabel="Delete"
</Can> />
); </Can>
);
}
if (processModelFile.name.match(/\.bpmn$/) && !isPrimaryBpmnFile) { if (processModelFile.name.match(/\.bpmn$/) && !isPrimaryBpmnFile) {
elements.push( elements.push(
<Can I="PUT" a={targetUris.processModelShowPath} ability={ability}> <Can I="PUT" a={targetUris.processModelShowPath} ability={ability}>
@ -360,27 +362,76 @@ export default function ProcessModelShow() {
); );
}; };
const [fileUploadEvent, setFileUploadEvent] = useState(null);
const [duplicateFilename, setDuplicateFilename] = useState<String>('');
const [showOverwriteConfirmationPrompt, setShowOverwriteConfirmationPrompt] =
useState(false);
const doFileUpload = (event: any) => {
event.preventDefault();
const url = `/process-models/${modifiedProcessModelId}/files`;
const formData = new FormData();
formData.append('file', filesToUpload[0]);
formData.append('fileName', filesToUpload[0].name);
HttpService.makeCallToBackend({
path: url,
successCallback: onUploadedCallback,
httpMethod: 'POST',
postBody: formData,
});
setFilesToUpload(null);
};
const handleFileUploadCancel = () => { const handleFileUploadCancel = () => {
setShowFileUploadModal(false); setShowFileUploadModal(false);
setFilesToUpload(null); setFilesToUpload(null);
}; };
const handleOverwriteFileConfirm = () => {
setShowOverwriteConfirmationPrompt(false);
doFileUpload(fileUploadEvent);
};
const handleOverwriteFileCancel = () => {
setShowOverwriteConfirmationPrompt(false);
setFilesToUpload(null);
};
const confirmOverwriteFileDialog = () => {
return (
<Modal
danger
open={showOverwriteConfirmationPrompt}
data-qa="file-overwrite-modal-confirmation-dialog"
modalHeading={`Overwrite the file: ${duplicateFilename}`}
modalLabel="Overwrite file?"
primaryButtonText="Yes"
secondaryButtonText="Cancel"
onSecondarySubmit={handleOverwriteFileCancel}
onRequestSubmit={handleOverwriteFileConfirm}
onRequestClose={handleOverwriteFileCancel}
/>
);
};
const displayOverwriteConfirmation = (filename: String) => {
setDuplicateFilename(filename);
setShowOverwriteConfirmationPrompt(true);
};
const checkDuplicateFile = (event: any) => {
if (processModel && processModel.files.length > 0) {
processModel.files.forEach((file) => {
if (file.name === filesToUpload[0].name) {
displayOverwriteConfirmation(file.name);
setFileUploadEvent(event);
}
});
}
};
const handleFileUpload = (event: any) => { const handleFileUpload = (event: any) => {
if (processModel) { if (processModel) {
event.preventDefault(); checkDuplicateFile(event);
const url = `/process-models/${modifiedProcessModelId}/files`;
const formData = new FormData();
formData.append('file', filesToUpload[0]);
formData.append('fileName', filesToUpload[0].name);
HttpService.makeCallToBackend({
path: url,
successCallback: onUploadedCallback,
httpMethod: 'POST',
postBody: formData,
});
} }
setShowFileUploadModal(false); setShowFileUploadModal(false);
setFilesToUpload(null);
}; };
const fileUploadModal = () => { const fileUploadModal = () => {
@ -548,6 +599,7 @@ export default function ProcessModelShow() {
return ( return (
<> <>
{fileUploadModal()} {fileUploadModal()}
{confirmOverwriteFileDialog()}
<ProcessBreadcrumb <ProcessBreadcrumb
hotCrumbs={[ hotCrumbs={[
['Process Groups', '/admin'], ['Process Groups', '/admin'],
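The upload flow in ProcessModelShow now defers the actual POST: handleFileUpload runs the chosen file name past the model's existing files, and a clash brings up the overwrite confirmation modal, whose confirm button replays the saved event through doFileUpload. A compact sketch of that duplicate check, reduced to a pure function so the intent is easier to see; ProcessFile and the function name are simplified stand-ins, not the project's types:

// Sketch only: the check behind the new "Overwrite file?" modal.
interface ProcessFile {
  name: string;
}

// returns the clashing file name when the upload would overwrite an existing file,
// i.e. the case where the component prompts before uploading; returns null otherwise
const findDuplicateFilename = (
  existingFiles: ProcessFile[],
  uploadName: string
): string | null => {
  const match = existingFiles.find((file) => file.name === uploadName);
  return match ? match.name : null;
};

// usage, mirroring checkDuplicateFile in the hunk above:
// const duplicate = findDuplicateFilename(processModel.files, filesToUpload[0].name);
// if (duplicate) { displayOverwriteConfirmation(duplicate); setFileUploadEvent(event); }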

View File

@ -36,7 +36,20 @@ export default function ReactFormEditor() {
return searchParams.get('file_ext') ?? 'json'; return searchParams.get('file_ext') ?? 'json';
})(); })();
const editorDefaultLanguage = fileExtension === 'md' ? 'markdown' : 'json'; const hasDiagram = fileExtension === 'bpmn' || fileExtension === 'dmn';
const editorDefaultLanguage = (() => {
if (fileExtension === 'json') {
return 'json';
}
if (hasDiagram) {
return 'xml';
}
if (fileExtension === 'md') {
return 'markdown';
}
return 'text';
})();
const modifiedProcessModelId = modifyProcessIdentifierForPathParam( const modifiedProcessModelId = modifyProcessIdentifierForPathParam(
`${params.process_model_id}` `${params.process_model_id}`
@ -193,6 +206,19 @@ export default function ReactFormEditor() {
buttonLabel="Delete" buttonLabel="Delete"
/> />
) : null} ) : null}
{hasDiagram ? (
<Button
onClick={() =>
navigate(
`/admin/process-models/${modifiedProcessModelId}/files/${params.file_name}`
)
}
variant="danger"
data-qa="view-diagram-button"
>
View Diagram
</Button>
) : null}
<Editor <Editor
height={600} height={600}
width="auto" width="auto"

View File

@ -40,7 +40,7 @@ export default function TaskShow() {
const { targetUris } = useUriListForPermissions(); const { targetUris } = useUriListForPermissions();
const permissionRequestData: PermissionsToCheck = { const permissionRequestData: PermissionsToCheck = {
[targetUris.processInstanceTaskListPath]: ['GET'], [targetUris.processInstanceTaskListDataPath]: ['GET'],
}; };
const { ability, permissionsLoaded } = usePermissionFetcher( const { ability, permissionsLoaded } = usePermissionFetcher(
permissionRequestData permissionRequestData
@ -50,7 +50,7 @@ export default function TaskShow() {
if (permissionsLoaded) { if (permissionsLoaded) {
const processResult = (result: any) => { const processResult = (result: any) => {
setTask(result); setTask(result);
if (ability.can('GET', targetUris.processInstanceTaskListPath)) { if (ability.can('GET', targetUris.processInstanceTaskListDataPath)) {
HttpService.makeCallToBackend({ HttpService.makeCallToBackend({
path: `/task-data/${modifyProcessIdentifierForPathParam( path: `/task-data/${modifyProcessIdentifierForPathParam(
result.process_model_identifier result.process_model_identifier

View File

@ -26,7 +26,7 @@ type backendCallProps = {
postBody?: any; postBody?: any;
}; };
class UnauthenticatedError extends Error { export class UnauthenticatedError extends Error {
constructor(message: string) { constructor(message: string) {
super(message); super(message);
this.name = 'UnauthenticatedError'; this.name = 'UnauthenticatedError';

View File

@ -27,8 +27,8 @@ const doLogout = () => {
const idToken = getIdToken(); const idToken = getIdToken();
localStorage.removeItem('jwtAccessToken'); localStorage.removeItem('jwtAccessToken');
localStorage.removeItem('jwtIdToken'); localStorage.removeItem('jwtIdToken');
const redirctUrl = `${window.location.origin}/`; const redirectUrl = `${window.location.origin}`;
const url = `${BACKEND_BASE_URL}/logout?redirect_url=${redirctUrl}&id_token=${idToken}`; const url = `${BACKEND_BASE_URL}/logout?redirect_url=${redirectUrl}&id_token=${idToken}`;
window.location.href = url; window.location.href = url;
}; };