diff --git a/flask-bpmn/src/flask_bpmn/api/api_error.py b/flask-bpmn/src/flask_bpmn/api/api_error.py index eb390abe1..c782c2d38 100644 --- a/flask-bpmn/src/flask_bpmn/api/api_error.py +++ b/flask-bpmn/src/flask_bpmn/api/api_error.py @@ -170,15 +170,17 @@ def set_user_sentry_context() -> None: def handle_exception(exception: Exception) -> flask.wrappers.Response: """Handles unexpected exceptions.""" set_user_sentry_context() - id = capture_exception(exception) - organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG") - project_slug = current_app.config.get("SENTRY_PROJECT_SLUG") sentry_link = None - if organization_slug and project_slug: - sentry_link = ( - f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" - ) + if not isinstance(exception, ApiError) or exception.error_code != "invalid_token": + id = capture_exception(exception) + + organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG") + project_slug = current_app.config.get("SENTRY_PROJECT_SLUG") + if organization_slug and project_slug: + sentry_link = ( + f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" + ) # !!!NOTE!!!: do this after sentry stuff since calling logger.exception # seems to break the sentry sdk context where we no longer get back diff --git a/spiffworkflow-backend/bin/build_and_run_with_docker_compose b/spiffworkflow-backend/bin/build_and_run_with_docker_compose index 4356d974f..2dfa896e6 100755 --- a/spiffworkflow-backend/bin/build_and_run_with_docker_compose +++ b/spiffworkflow-backend/bin/build_and_run_with_docker_compose @@ -9,7 +9,7 @@ set -o errtrace -o errexit -o nounset -o pipefail if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" - export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models" + export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models" fi if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then diff --git a/spiffworkflow-backend/bin/git_commit_bpmn_models_repo b/spiffworkflow-backend/bin/git_commit_bpmn_models_repo index 13e18da9c..62fc0cab0 100755 --- a/spiffworkflow-backend/bin/git_commit_bpmn_models_repo +++ b/spiffworkflow-backend/bin/git_commit_bpmn_models_repo @@ -11,11 +11,12 @@ set -o errtrace -o errexit -o nounset -o pipefail bpmn_models_absolute_dir="$1" git_commit_message="$2" -git_commit_username="$3" -git_commit_email="$4" +git_branch="$3" +git_commit_username="$4" +git_commit_email="$5" -if [[ -z "${2:-}" ]]; then - >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message]" +if [[ -z "${5:-}" ]]; then + >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]" exit 1 fi @@ -26,11 +27,8 @@ git add . 
if [ -z "$(git status --porcelain)" ]; then echo "No changes to commit" else - if [[ -n "$git_commit_username" ]]; then - git config --local user.name "$git_commit_username" - fi - if [[ -n "$git_commit_email" ]]; then - git config --local user.email "$git_commit_email" - fi + git config --local user.name "$git_commit_username" + git config --local user.email "$git_commit_email" git commit -m "$git_commit_message" + git push --set-upstream origin "$git_branch" fi diff --git a/spiffworkflow-backend/bin/start_keycloak b/spiffworkflow-backend/bin/start_keycloak index 32b502ca0..f76347da7 100755 --- a/spiffworkflow-backend/bin/start_keycloak +++ b/spiffworkflow-backend/bin/start_keycloak @@ -18,7 +18,19 @@ set -o errtrace -o errexit -o nounset -o pipefail if ! docker network inspect spiffworkflow > /dev/null 2>&1; then docker network create spiffworkflow fi -docker rm keycloak 2>/dev/null || echo 'no keycloak container found, safe to start new container' + +# https://stackoverflow.com/a/60579344/6090676 +container_name="keycloak" +if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then + echo ":: Found container - $container_name" + if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then + echo ":: Stopping running container - $container_name" + docker stop $container_name + fi + echo ":: Removing stopped container - $container_name" + docker rm $container_name +fi + docker run \ -p 7002:8080 \ -d \ diff --git a/spiffworkflow-backend/docker-compose.yml b/spiffworkflow-backend/docker-compose.yml index 1cbe9dcb7..410cbb7ab 100644 --- a/spiffworkflow-backend/docker-compose.yml +++ b/spiffworkflow-backend/docker-compose.yml @@ -68,7 +68,7 @@ services: - "7000:7000" network_mode: host volumes: - - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models - ./log:/app/log healthcheck: test: curl localhost:7000/v1.0/status --fail @@ -82,7 +82,7 @@ services: profiles: - debug volumes: - - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models - ./:/app command: /app/bin/boot_in_docker_debug_mode diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index a23004b40..5bcb2d0f4 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -654,7 +654,7 @@ werkzeug = "*" type = "git" url = "https://github.com/sartography/flask-bpmn" reference = "main" -resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4" +resolved_reference = "0f2d249d0e799bec912d46132e9ef9754fdacbd7" [[package]] name = "Flask-Cors" @@ -1851,7 +1851,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134" +resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994" [[package]] name = "SQLAlchemy" @@ -2563,7 +2563,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, - {file = 
"greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2572,7 +2571,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2581,7 +2579,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, @@ -2880,7 +2877,10 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, + {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, + {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = 
"orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, + {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, @@ -2989,18 +2989,7 @@ psycopg2 = [ {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"}, ] pyasn1 = [ - {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, - {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, - {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, - {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, - {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, - {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, - {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, - {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, - {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, - {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] pycodestyle = [ diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index d14a514b6..3d78cb43e 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -616,15 +616,9 @@ paths: items: $ref: "#/components/schemas/Workflow" - /process-models/{process_group_id}/{process_model_id}/script-unit-tests: + /process-models/{modified_process_model_identifier}/script-unit-tests: parameters: - - name: process_group_id - in: path - required: true - description: The unique id of an existing process group - schema: - type: string - - name: process_model_id + - name: modified_process_model_identifier in: path required: true description: The unique id of an existing process model. 
@@ -643,15 +637,9 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-models/{process_group_id}/{process_model_id}/script-unit-tests/run: + /process-models/{modified_process_model_identifier}/script-unit-tests/run: parameters: - - name: process_group_id - in: path - required: true - description: The unique id of an existing process group - schema: - type: string - - name: process_model_id + - name: modified_process_model_identifier in: path required: true description: The unique id of an existing process model. @@ -691,6 +679,53 @@ paths: schema: $ref: "#/components/schemas/Workflow" + /process-instances/{modified_process_model_identifier}/{process_instance_id}/task-info: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The unique id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. + schema: + type: string + - name: all_tasks + in: query + required: false + description: If true, this will return all tasks associated with the process instance and not just user tasks. + schema: + type: boolean + - name: spiff_step + in: query + required: false + description: If set will return the tasks as they were during a specific step of execution. + schema: + type: integer + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data + summary: returns the list of all user tasks associated with process instance without the task data + responses: + "200": + description: list of tasks + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Task" + /process-instances/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: modified_process_model_identifier @@ -705,6 +740,12 @@ paths: description: The unique id of an existing process instance. schema: type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
+ schema: + type: string get: tags: - Process Instances @@ -1166,8 +1207,8 @@ paths: get: tags: - Process Instances - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list - summary: returns the list of all user tasks associated with process instance + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_with_task_data + summary: returns the list of all user tasks associated with process instance with the task data responses: "200": description: list of tasks diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py index 15cbead83..39e10cb58 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py @@ -17,5 +17,3 @@ GIT_CLONE_URL_FOR_PUBLISHING = environ.get( ) GIT_USERNAME = "sartography-automated-committer" GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com" -GIT_BRANCH_TO_PUBLISH_TO = "main" -GIT_BRANCH = "main" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index 737c1a708..248f2d93c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -17,7 +17,6 @@ groups: dan, mike, jason, - j, jarrad, elizabeth, jon, @@ -32,7 +31,6 @@ groups: dan, mike, jason, - j, amir, jarrad, elizabeth, @@ -64,6 +62,12 @@ groups: harmeet, ] + admin-ro: + users: + [ + j, + ] + permissions: admin: groups: [admin] @@ -71,6 +75,17 @@ permissions: allowed_permissions: [create, read, update, delete] uri: /* + admin-readonly: + groups: [admin-ro] + users: [] + allowed_permissions: [read] + uri: /* + admin-process-instances-for-readonly: + groups: [admin-ro] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /v1.0/process-instances/* + tasks-crud: groups: [everybody] users: [] @@ -114,12 +129,12 @@ permissions: users: [] allowed_permissions: [read] uri: /v1.0/processes - - task-data-read: - groups: [demo] - users: [] - allowed_permissions: [read] - uri: /v1.0/task-data/* + # + # task-data-read: + # groups: [demo] + # users: [] + # allowed_permissions: [read] + # uri: /v1.0/task-data/* manage-procurement-admin: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml new file mode 100644 index 000000000..982b945c6 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml @@ -0,0 +1,165 @@ +default_group: everybody + +groups: + admin: + users: + [ + admin, + jakub, + kb, + alex, + dan, + mike, + jason, + j, + jarrad, + elizabeth, + jon, + natalia, + ] + + Finance Team: + users: + [ + jakub, + alex, + dan, + mike, + jason, + j, + amir, + jarrad, + elizabeth, + jon, + natalia, + sasha, + fin, + fin1, + ] + + demo: + users: + [ + core, + fin, + fin1, + harmeet, + sasha, + manuchehr, + lead, + lead1 + ] + + core-contributor: + users: + [ + core, + harmeet, + ] + +permissions: + admin: + groups: [admin] + users: [] + allowed_permissions: [read] + uri: /* + admin-process-instances: + groups: [admin] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /v1.0/process-instances/* + + tasks-crud: 
+ groups: [everybody] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /v1.0/tasks/* + + service-tasks: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /v1.0/service-tasks + + + # read all for everybody + read-all-process-groups: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /v1.0/process-groups/* + read-all-process-models: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /v1.0/process-models/* + read-all-process-instance: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /v1.0/process-instances/* + read-process-instance-reports: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /v1.0/process-instances/reports/* + processes-read: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /v1.0/processes + + + manage-procurement-admin-instances: + groups: ["Project Lead"] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /v1.0/process-instances/manage-procurement:* + manage-procurement-admin-instances-slash: + groups: ["Project Lead"] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /v1.0/process-instances/manage-procurement/* + manage-procurement-admin-instance-logs: + groups: ["Project Lead"] + users: [] + allowed_permissions: [read] + uri: /v1.0/logs/manage-procurement:* + manage-procurement-admin-instance-logs-slash: + groups: ["Project Lead"] + users: [] + allowed_permissions: [read] + uri: /v1.0/logs/manage-procurement/* + + manage-revenue-streams-instances: + groups: ["core-contributor", "demo"] + users: [] + allowed_permissions: [create, read] + uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* + manage-revenue-streams-instance-logs: + groups: ["core-contributor", "demo"] + users: [] + allowed_permissions: [read] + uri: /v1.0/logs/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* + + manage-procurement-invoice-instances: + groups: ["core-contributor", "demo"] + users: [] + allowed_permissions: [create, read] + uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:* + manage-procurement-invoice-instance-logs: + groups: ["core-contributor", "demo"] + users: [] + allowed_permissions: [read] + uri: /v1.0/logs/manage-procurement:procurement:core-contributor-invoice-management:* + + manage-procurement-instances: + groups: ["core-contributor", "demo"] + users: [] + allowed_permissions: [create, read] + uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:* + manage-procurement-instance-logs: + groups: ["core-contributor", "demo"] + users: [] + allowed_permissions: [read] + uri: /v1.0/logs/manage-procurement:vendor-lifecycle-management:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml index 2e41e3b00..731de9ab0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml @@ -148,33 +148,18 @@ permissions: allowed_permissions: [create, read, update, delete] uri: /v1.0/process-groups/manage-procurement:procurement:* - manage-revenue-streams-instantiate: - groups: ["core-contributor", "demo"] - users: [] - 
allowed_permissions: [create] - uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* manage-revenue-streams-instances: groups: ["core-contributor", "demo"] users: [] allowed_permissions: [create, read] uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-procurement-invoice-instantiate: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create] - uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:* manage-procurement-invoice-instances: groups: ["core-contributor", "demo"] users: [] allowed_permissions: [create, read] uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-instantiate: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create] - uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:* manage-procurement-instances: groups: ["core-contributor", "demo"] users: [] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py index 5f0fec4ca..9cc247056 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py @@ -4,3 +4,4 @@ from os import environ GIT_BRANCH = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging") GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main") GIT_COMMIT_ON_SAVE = False +SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py index bbda9db9a..605c1bccc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py @@ -15,6 +15,7 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug" ) +GIT_COMMIT_ON_SAVE = False # NOTE: set this here since nox shoves tests and src code to # different places and this allows us to know exactly where we are at the start diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py index c9003594b..f2e4c2221 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py @@ -1,4 +1,4 @@ -"""Spiff_step_details.""" +"""Process_instance_metadata.""" from dataclasses import dataclass from flask_bpmn.models.db import db diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py index 1e85f7229..50b73fbae 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py @@ -8,6 +8,10 @@ from marshmallow import INCLUDE from sqlalchemy import UniqueConstraint +class SpecReferenceNotFoundError(Exception): + """SpecReferenceNotFoundError.""" + + @dataclass() class SpecReference: """File Reference Information. 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py index b0b908877..532a6c09c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py @@ -8,7 +8,7 @@ from flask_bpmn.models.db import SpiffworkflowBaseDBModel @dataclass class SpiffLoggingModel(SpiffworkflowBaseDBModel): - """LoggingModel.""" + """SpiffLoggingModel.""" __tablename__ = "spiff_logging" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py index 91d70116a..9afb5d078 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py @@ -21,7 +21,7 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore ) spiff_step: int = db.Column(db.Integer, nullable=False) - task_json: str = deferred(db.Column(db.JSON, nullable=False)) # type: ignore + task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) completed_by_user_id: int = db.Column(db.Integer, nullable=True) lane_assignment_id: Optional[int] = db.Column( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 52bb11715..60deda842 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -108,7 +108,7 @@ class Task: multi_instance_type: Union[MultiInstanceType, None] = None, multi_instance_count: str = "", multi_instance_index: str = "", - process_name: str = "", + process_identifier: str = "", properties: Union[dict, None] = None, process_instance_id: Union[int, None] = None, process_instance_status: Union[str, None] = None, @@ -118,6 +118,7 @@ class Task: form_schema: Union[str, None] = None, form_ui_schema: Union[str, None] = None, parent: Optional[str] = None, + call_activity_process_identifier: Optional[str] = None, ): """__init__.""" self.id = id @@ -129,6 +130,7 @@ class Task: self.documentation = documentation self.lane = lane self.parent = parent + self.call_activity_process_identifier = call_activity_process_identifier self.data = data if self.data is None: @@ -151,7 +153,7 @@ class Task: self.multi_instance_index = ( multi_instance_index # And the index of the currently repeating task. ) - self.process_name = process_name + self.process_identifier = process_identifier self.properties = properties # Arbitrary extension properties from BPMN editor. 
if self.properties is None: @@ -177,7 +179,7 @@ class Task: "multi_instance_type": multi_instance_type, "multi_instance_count": self.multi_instance_count, "multi_instance_index": self.multi_instance_index, - "process_name": self.process_name, + "process_identifier": self.process_identifier, "properties": self.properties, "process_instance_id": self.process_instance_id, "process_instance_status": self.process_instance_status, @@ -187,6 +189,7 @@ class Task: "form_schema": self.form_schema, "form_ui_schema": self.form_ui_schema, "parent": self.parent, + "call_activity_process_identifier": self.call_activity_process_identifier, } @classmethod @@ -282,7 +285,7 @@ class TaskSchema(Schema): "multi_instance_type", "multi_instance_count", "multi_instance_index", - "process_name", + "process_identifier", "properties", "process_instance_id", "form_schema", @@ -293,7 +296,7 @@ class TaskSchema(Schema): documentation = marshmallow.fields.String(required=False, allow_none=True) # form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True) title = marshmallow.fields.String(required=False, allow_none=True) - process_name = marshmallow.fields.String(required=False, allow_none=True) + process_identifier = marshmallow.fields.String(required=False, allow_none=True) lane = marshmallow.fields.String(required=False, allow_none=True) @marshmallow.post_load diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 895f8e1ba..7907ef970 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -1,5 +1,6 @@ """APIs for dealing with process groups, process models, and process instances.""" import json +import os import random import re import string @@ -66,6 +67,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema from spiffworkflow_backend.models.secret_model import SecretModel from spiffworkflow_backend.models.secret_model import SecretModelSchema from spiffworkflow_backend.models.spec_reference import SpecReferenceCache +from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel @@ -74,6 +76,7 @@ from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignme from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService +from spiffworkflow_backend.services.file_system_service import FileSystemService from spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.message_service import MessageService from spiffworkflow_backend.services.process_instance_processor import ( @@ -167,6 +170,9 @@ def process_group_add(body: dict) -> flask.wrappers.Response: """Add_process_group.""" process_group = ProcessGroup(**body) ProcessModelService.add_process_group(process_group) + commit_and_push_to_git( + f"User: {g.user.username} added process group {process_group.id}" + ) return make_response(jsonify(process_group), 201) @@ -174,6 +180,9 @@ def 
process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo """Process_group_delete.""" process_group_id = un_modify_modified_process_model_id(modified_process_group_id) ProcessModelService().process_group_delete(process_group_id) + commit_and_push_to_git( + f"User: {g.user.username} deleted process group {process_group_id}" + ) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -191,6 +200,9 @@ def process_group_update( process_group_id = un_modify_modified_process_model_id(modified_process_group_id) process_group = ProcessGroup(id=process_group_id, **body_filtered) ProcessModelService.update_process_group(process_group) + commit_and_push_to_git( + f"User: {g.user.username} updated process group {process_group_id}" + ) return make_response(jsonify(process_group), 200) @@ -255,7 +267,10 @@ def process_group_move( new_process_group = ProcessModelService().process_group_move( original_process_group_id, new_location ) - return make_response(jsonify(new_process_group), 201) + commit_and_push_to_git( + f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}" + ) + return make_response(jsonify(new_process_group), 200) def process_model_create( @@ -303,6 +318,9 @@ def process_model_create( ) ProcessModelService.add_process_model(process_model_info) + commit_and_push_to_git( + f"User: {g.user.username} created process model {process_model_info.id}" + ) return Response( json.dumps(ProcessModelInfoSchema().dump(process_model_info)), status=201, @@ -316,6 +334,9 @@ def process_model_delete( """Process_model_delete.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") ProcessModelService().process_model_delete(process_model_identifier) + commit_and_push_to_git( + f"User: {g.user.username} deleted process model {process_model_identifier}" + ) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -339,6 +360,9 @@ def process_model_update( process_model = get_process_model(process_model_identifier) ProcessModelService.update_process_model(process_model, body_filtered) + commit_and_push_to_git( + f"User: {g.user.username} updated process model {process_model_identifier}" + ) return ProcessModelInfoSchema().dump(process_model) @@ -370,7 +394,10 @@ def process_model_move( new_process_model = ProcessModelService().process_model_move( original_process_model_id, new_location ) - return make_response(jsonify(new_process_model), 201) + commit_and_push_to_git( + f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}" + ) + return make_response(jsonify(new_process_model), 200) def process_model_publish( @@ -466,14 +493,9 @@ def process_model_file_update( ) SpecFileService.update_file(process_model, file_name, request_file_contents) - - if current_app.config["GIT_COMMIT_ON_SAVE"]: - git_output = GitService.commit( - message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" - ) - current_app.logger.info(f"git output: {git_output}") - else: - current_app.logger.info("Git commit on save is disabled") + commit_and_push_to_git( + f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" + ) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -495,6 +517,9 @@ def process_model_file_delete( ) ) from exception + commit_and_push_to_git( + f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}" + ) 
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -516,6 +541,9 @@ def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response: file_contents = SpecFileService.get_data(process_model, file.name) file.file_contents = file_contents file.process_model_id = process_model.id + commit_and_push_to_git( + f"User: {g.user.username} added process model file {process_model_identifier}/{file.name}" + ) return Response( json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" ) @@ -1031,11 +1059,11 @@ def process_instance_list( elif attribute in instance_metadata_aliases: if order_by_option.startswith("-"): order_by_query_array.append( - instance_metadata_aliases[attribute].value.desc() + func.max(instance_metadata_aliases[attribute].value).desc() ) else: order_by_query_array.append( - instance_metadata_aliases[attribute].value.asc() + func.max(instance_metadata_aliases[attribute].value).asc() ) process_instances = ( @@ -1080,25 +1108,48 @@ def process_instance_report_column_list() -> flask.wrappers.Response: def process_instance_show( - modified_process_model_identifier: str, process_instance_id: int + modified_process_model_identifier: str, + process_instance_id: int, + process_identifier: Optional[str] = None, ) -> flask.wrappers.Response: """Create_process_instance.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") process_instance = find_process_instance_by_id_or_raise(process_instance_id) current_version_control_revision = GitService.get_current_revision() - process_model = get_process_model(process_model_identifier) - if process_model.primary_file_name: + process_model_with_diagram = None + name_of_file_with_diagram = None + if process_identifier: + spec_reference = SpecReferenceCache.query.filter_by( + identifier=process_identifier + ).first() + if spec_reference is None: + raise SpecReferenceNotFoundError( + f"Could not find given process identifier in the cache: {process_identifier}" + ) + + process_model_with_diagram = ProcessModelService.get_process_model( + spec_reference.process_model_id + ) + name_of_file_with_diagram = spec_reference.file_name + else: + process_model_with_diagram = get_process_model(process_model_identifier) + if process_model_with_diagram.primary_file_name: + name_of_file_with_diagram = process_model_with_diagram.primary_file_name + + if process_model_with_diagram and name_of_file_with_diagram: if ( process_instance.bpmn_version_control_identifier == current_version_control_revision ): bpmn_xml_file_contents = SpecFileService.get_data( - process_model, process_model.primary_file_name + process_model_with_diagram, name_of_file_with_diagram ).decode("utf-8") else: bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( - process_model, process_instance.bpmn_version_control_identifier + process_model_with_diagram, + process_instance.bpmn_version_control_identifier, + file_name=name_of_file_with_diagram, ) process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents @@ -1415,11 +1466,44 @@ def get_tasks( return make_response(jsonify(response_json), 200) -def process_instance_task_list( +def process_instance_task_list_without_task_data( modified_process_model_identifier: str, process_instance_id: int, all_tasks: bool = False, spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list_without_task_data.""" + return process_instance_task_list( + modified_process_model_identifier, + process_instance_id, + all_tasks, + 
spiff_step, + get_task_data=False, + ) + + +def process_instance_task_list_with_task_data( + modified_process_model_identifier: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list_with_task_data.""" + return process_instance_task_list( + modified_process_model_identifier, + process_instance_id, + all_tasks, + spiff_step, + get_task_data=True, + ) + + +def process_instance_task_list( + _modified_process_model_identifier: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, + get_task_data: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" process_instance = find_process_instance_by_id_or_raise(process_instance_id) @@ -1435,7 +1519,8 @@ def process_instance_task_list( ) if step_detail is not None and process_instance.bpmn_json is not None: bpmn_json = json.loads(process_instance.bpmn_json) - bpmn_json["tasks"] = step_detail.task_json + bpmn_json["tasks"] = step_detail.task_json["tasks"] + bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"] process_instance.bpmn_json = json.dumps(bpmn_json) processor = ProcessInstanceProcessor(process_instance) @@ -1449,7 +1534,8 @@ def process_instance_task_list( tasks = [] for spiff_task in spiff_tasks: task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data + if get_task_data: + task.data = spiff_task.data tasks.append(task) return make_response(jsonify(tasks), 200) @@ -1485,7 +1571,25 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response task.data = spiff_task.data task.process_model_display_name = process_model.display_name task.process_model_identifier = process_model.id + process_model_with_form = process_model + refs = SpecFileService.get_references_for_process(process_model_with_form) + all_processes = [i.identifier for i in refs] + if task.process_identifier not in all_processes: + bpmn_file_full_path = ( + ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier( + task.process_identifier + ) + ) + relative_path = os.path.relpath( + bpmn_file_full_path, start=FileSystemService.root_path() + ) + process_model_relative_path = os.path.dirname(relative_path) + process_model_with_form = ( + ProcessModelService.get_process_model_from_relative_path( + process_model_relative_path + ) + ) if task.type == "User Task": if not form_schema_file_name: @@ -1614,7 +1718,7 @@ def task_submit( def script_unit_test_create( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] + modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] ) -> flask.wrappers.Response: """Script_unit_test_create.""" bpmn_task_identifier = _get_required_parameter_or_raise( @@ -1625,7 +1729,7 @@ def script_unit_test_create( "expected_output_json", body ) - process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] if file is None: @@ -1703,7 +1807,7 @@ def script_unit_test_create( def script_unit_test_run( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] + modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] ) -> flask.wrappers.Response: """Script_unit_test_run.""" # FIXME: We should probably clear 
this somewhere else but this works @@ -1899,7 +2003,6 @@ def secret_list( def add_secret(body: Dict) -> Response: """Add secret.""" secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) - assert secret_model # noqa: S101 return Response( json.dumps(SecretModelSchema().dump(secret_model)), status=201, @@ -2040,3 +2143,12 @@ def update_task_data( status=200, mimetype="application/json", ) + + +def commit_and_push_to_git(message: str) -> None: + """Commit_and_push_to_git.""" + if current_app.config["GIT_COMMIT_ON_SAVE"]: + git_output = GitService.commit(message=message) + current_app.logger.info(f"git output: {git_output}") + else: + current_app.logger.info("Git commit on save is disabled") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 2bbbc1374..ad98fbbc6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -16,8 +16,9 @@ from flask_bpmn.api.api_error import ApiError from werkzeug.wrappers import Response from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.authentication_service import AuthenticationService from spiffworkflow_backend.services.authentication_service import ( - AuthenticationService, + MissingAccessTokenError, ) from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.user_service import UserService @@ -268,10 +269,10 @@ def login_api_return(code: str, state: str, session_state: str) -> str: code, "/v1.0/login_api_return" ) access_token: str = auth_token_object["access_token"] - assert access_token # noqa: S101 + if access_token is None: + raise MissingAccessTokenError("Cannot find the access token for the request") + return access_token - # return redirect("localhost:7000/v1.0/ui") - # return {'uid': 'user_1'} def logout(id_token: str, redirect_url: Optional[str]) -> Response: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index f4bd357b1..95c1eaa89 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -16,6 +16,10 @@ from werkzeug.wrappers import Response from spiffworkflow_backend.models.refresh_token import RefreshTokenModel +class MissingAccessTokenError(Exception): + """MissingAccessTokenError.""" + + class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py index f972b672b..8ef952c3c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py @@ -46,24 +46,39 @@ class GitService: @classmethod def get_instance_file_contents_for_revision( - cls, process_model: ProcessModelInfo, revision: str + cls, + process_model: ProcessModelInfo, + revision: str, + file_name: Optional[str] = None, ) -> str: """Get_instance_file_contents_for_revision.""" bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] process_model_relative_path = FileSystemService.process_model_relative_path( process_model ) + 
file_name_to_use = file_name + if file_name_to_use is None: + file_name_to_use = process_model.primary_file_name with FileSystemService.cd(bpmn_spec_absolute_dir): shell_command = [ "git", "show", - f"{revision}:{process_model_relative_path}/{process_model.primary_file_name}", + f"{revision}:{process_model_relative_path}/{file_name_to_use}", ] return cls.run_shell_command_to_get_stdout(shell_command) @classmethod - def commit(cls, message: str, repo_path: Optional[str] = None) -> str: + def commit( + cls, + message: str, + repo_path: Optional[str] = None, + branch_name: Optional[str] = None, + ) -> str: """Commit.""" + cls.check_for_basic_configs() + branch_name_to_use = branch_name + if branch_name_to_use is None: + branch_name_to_use = current_app.config["GIT_BRANCH"] repo_path_to_use = repo_path if repo_path is None: repo_path_to_use = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] @@ -82,14 +97,25 @@ class GitService: shell_command_path, repo_path_to_use, message, + branch_name_to_use, git_username, git_email, ] return cls.run_shell_command_to_get_stdout(shell_command) @classmethod - def check_for_configs(cls) -> None: + def check_for_basic_configs(cls) -> None: + """Check_for_basic_configs.""" + if current_app.config["GIT_BRANCH"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_BRANCH. " + "This is required for publishing process models" + ) + + @classmethod + def check_for_publish_configs(cls) -> None: """Check_for_configs.""" + cls.check_for_basic_configs() if current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] is None: raise MissingGitConfigsError( "Missing config for GIT_BRANCH_TO_PUBLISH_TO. " @@ -142,7 +168,7 @@ class GitService: @classmethod def handle_web_hook(cls, webhook: dict) -> bool: """Handle_web_hook.""" - cls.check_for_configs() + cls.check_for_publish_configs() if "repository" not in webhook or "clone_url" not in webhook["repository"]: raise InvalidGitWebhookBodyError( @@ -178,7 +204,7 @@ class GitService: @classmethod def publish(cls, process_model_id: str, branch_to_update: str) -> str: """Publish.""" - cls.check_for_configs() + cls.check_for_publish_configs() source_process_model_root = FileSystemService.root_path() source_process_model_path = os.path.join( source_process_model_root, process_model_id @@ -227,10 +253,7 @@ class GitService: f"Request to publish changes to {process_model_id}, " f"from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}" ) - cls.commit(commit_message, destination_process_root) - cls.run_shell_command( - ["git", "push", "--set-upstream", "origin", branch_to_pull_request] - ) + cls.commit(commit_message, destination_process_root, branch_to_pull_request) # build url for github page to open PR git_remote = cls.run_shell_command_to_get_stdout( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ffe69fd72..5edc526cf 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -551,7 +551,7 @@ class ProcessInstanceProcessor: """SaveSpiffStepDetails.""" bpmn_json = self.serialize() wf_json = json.loads(bpmn_json) - task_json = wf_json["tasks"] + task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]} return { "process_instance_id": self.process_instance_model.id, diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 46bd252b9..5b2781a20 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -302,6 +302,11 @@ class ProcessInstanceService: else: lane = None + if hasattr(spiff_task.task_spec, "spec"): + call_activity_process_identifier = spiff_task.task_spec.spec + else: + call_activity_process_identifier = None + parent_id = None if spiff_task.parent: parent_id = spiff_task.parent.id @@ -316,9 +321,10 @@ class ProcessInstanceService: multi_instance_type=mi_type, multi_instance_count=info["mi_count"], multi_instance_index=info["mi_index"], - process_name=spiff_task.task_spec._wf_spec.description, + process_identifier=spiff_task.task_spec._wf_spec.name, properties=props, parent=parent_id, + call_activity_process_identifier=call_activity_process_identifier, ) return task diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py index 964981a85..67be986e1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py @@ -223,7 +223,7 @@ class ProcessModelService(FileSystemService): user = UserService.current_user() new_process_model_list = [] for process_model in process_models: - uri = f"/v1.0/process-models/{process_model.id.replace('/', ':')}/process-instances" + uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}" result = AuthorizationService.user_has_permission( user=user, permission="create", target_uri=uri ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index 15e25a759..6fec8b796 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -31,7 +31,6 @@ class ServiceTaskDelegate: if value.startswith(secret_prefix): key = value.removeprefix(secret_prefix) secret = SecretService().get_secret(key) - assert secret # noqa: S101 return secret.value file_prefix = "file:" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py index c69f41c30..72f59d1f7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py @@ -171,13 +171,18 @@ class SpecFileService(FileSystemService): ref.is_primary = True if ref.is_primary: - ProcessModelService.update_process_model( - process_model_info, - { - "primary_process_id": ref.identifier, - "primary_file_name": file_name, - }, - ) + update_hash = {} + if not process_model_info.primary_file_name: + update_hash["primary_process_id"] = ref.identifier + update_hash["primary_file_name"] = file_name + elif file_name == process_model_info.primary_file_name: + update_hash["primary_process_id"] = ref.identifier + + if len(update_hash) > 0: + ProcessModelService.update_process_model( + process_model_info, + update_hash, + ) 
SpecFileService.update_caches(ref) return file diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 0070c5c94..3bc21456e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1167,6 +1167,60 @@ class TestProcessApi(BaseTest): xml_file_contents = f_open.read() assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents + def test_process_instance_show_with_specified_process_identifier( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_instance_show_with_specified_process_identifier.""" + process_model_id = "call_activity_nested" + process_model_identifier = self.create_group_and_model_with_bpmn( + client=client, + user=with_super_admin_user, + process_group_id="test_group_two", + process_model_id=process_model_id, + bpmn_file_location="call_activity_nested", + ) + spec_reference = SpecReferenceCache.query.filter_by( + identifier="Level2b" + ).first() + assert spec_reference + modified_process_model_identifier = ( + self.modify_process_identifier_for_path_param(process_model_identifier) + ) + headers = self.logged_in_headers(with_super_admin_user) + create_response = self.create_process_instance_from_process_model_id( + client, process_model_identifier, headers + ) + assert create_response.json is not None + assert create_response.status_code == 201 + process_instance_id = create_response.json["id"] + client.post( + f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + show_response = client.get( + f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}?process_identifier={spec_reference.identifier}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert show_response.json is not None + assert show_response.status_code == 200 + file_system_root = FileSystemService.root_path() + process_instance_file_path = ( + f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" + ) + with open(process_instance_file_path) as f_open: + xml_file_contents = f_open.read() + assert show_response.json["bpmn_xml_file_contents"] != xml_file_contents + spec_reference_file_path = os.path.join( + file_system_root, spec_reference.relative_path + ) + with open(spec_reference_file_path) as f_open: + xml_file_contents = f_open.read() + assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents + def test_message_start_when_starting_process_instance( self, app: Flask, @@ -2496,7 +2550,7 @@ class TestProcessApi(BaseTest): f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}", headers=self.logged_in_headers(with_super_admin_user), ) - assert response.status_code == 201 + assert response.status_code == 200 assert response.json["id"] == new_process_model_path # make sure the original model does not exist @@ -2541,7 +2595,7 @@ class TestProcessApi(BaseTest): f"/v1.0/process-groups/{modified_original_process_group_id}/move?new_location={new_location}", headers=self.logged_in_headers(with_super_admin_user), ) - assert response.status_code == 201 + assert response.status_code == 200 assert response.json["id"] == new_sub_path # make sure the 
original subgroup does not exist diff --git a/spiffworkflow-frontend/.gitignore b/spiffworkflow-frontend/.gitignore index 8ff3e35ce..c0316f7ea 100644 --- a/spiffworkflow-frontend/.gitignore +++ b/spiffworkflow-frontend/.gitignore @@ -8,6 +8,9 @@ # testing /coverage +# in case we accidentally run backend tests in frontend. :D +/.coverage.* + # production /build diff --git a/spiffworkflow-frontend/cypress/e2e/process_models.cy.js b/spiffworkflow-frontend/cypress/e2e/process_models.cy.js index 4fd1b4810..43fba108e 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_models.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_models.cy.js @@ -1,4 +1,5 @@ import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; +import { miscDisplayName } from '../support/helpers'; describe('process-models', () => { beforeEach(() => { @@ -16,7 +17,7 @@ describe('process-models', () => { const modelDisplayName = `Test Model 2 ${id}`; const modelId = `test-model-2-${id}`; const newModelDisplayName = `${modelDisplayName} edited`; - cy.contains('99-Shared Resources').click(); + cy.contains(miscDisplayName).click(); cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); @@ -34,7 +35,7 @@ describe('process-models', () => { cy.contains(`Process Model: ${newModelDisplayName}`); // go back to process model show by clicking on the breadcrumb - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.getBySel('delete-process-model-button').click(); cy.contains('Are you sure'); @@ -46,6 +47,7 @@ describe('process-models', () => { `process-groups/${modifyProcessIdentifierForPathParam(groupId)}` ); cy.contains(modelId).should('not.exist'); + cy.contains(modelDisplayName).should('not.exist'); }); it('can create new bpmn, dmn, and json files', () => { @@ -61,11 +63,11 @@ describe('process-models', () => { const dmnFileName = `dmn_test_file_${id}`; const jsonFileName = `json_test_file_${id}`; - cy.contains('99-Shared Resources').click(); + cy.contains(miscDisplayName).click(); cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); - cy.contains(directParentGroupId).click(); + cy.contains(groupDisplayName).click(); cy.contains(modelDisplayName).click(); cy.url().should( 'include', @@ -90,7 +92,7 @@ describe('process-models', () => { cy.get('input[name=file_name]').type(bpmnFileName); cy.contains('Save Changes').click(); cy.contains(`Process Model File: ${bpmnFileName}`); - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.contains(`Process Model: ${modelDisplayName}`); // cy.getBySel('files-accordion').click(); cy.contains(`${bpmnFileName}.bpmn`).should('exist'); @@ -108,7 +110,7 @@ describe('process-models', () => { cy.get('input[name=file_name]').type(dmnFileName); cy.contains('Save Changes').click(); cy.contains(`Process Model File: ${dmnFileName}`); - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.contains(`Process Model: ${modelDisplayName}`); // cy.getBySel('files-accordion').click(); cy.contains(`${dmnFileName}.dmn`).should('exist'); @@ -124,7 +126,7 @@ describe('process-models', () => { cy.contains(`Process Model File: ${jsonFileName}`); // wait for json to load before clicking away to avoid network errors cy.wait(500); - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.contains(`Process Model: ${modelDisplayName}`); // cy.getBySel('files-accordion').click(); cy.contains(`${jsonFileName}.json`).should('exist'); 
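These spec edits consistently navigate back to the model through its display name rather than its raw id, then wait for the model heading before continuing. A sketch of a reusable Cypress command for that repeated pattern (the command name is hypothetical; the specs above simply inline the two calls):

    // Hypothetical convenience command; the diff itself repeats these two calls inline.
    Cypress.Commands.add(
      'returnToProcessModelViaBreadcrumb',
      (modelDisplayName: string) => {
        cy.contains(modelDisplayName).click();
        cy.contains(`Process Model: ${modelDisplayName}`);
      }
    );
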
@@ -151,12 +153,12 @@ describe('process-models', () => { const modelDisplayName = `Test Model 2 ${id}`; const modelId = `test-model-2-${id}`; cy.contains('Add a process group'); - cy.contains('99-Shared Resources').click(); + cy.contains(miscDisplayName).click(); cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); - cy.contains(`${directParentGroupId}`).click(); + cy.contains(`${groupDisplayName}`).click(); cy.contains('Add a process model'); cy.contains(modelDisplayName).click(); cy.url().should( @@ -186,7 +188,7 @@ describe('process-models', () => { .click(); // in breadcrumb - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.getBySel('delete-process-model-button').click(); cy.contains('Are you sure'); @@ -203,7 +205,7 @@ describe('process-models', () => { // process models no longer has pagination post-tiles // it.only('can paginate items', () => { - // cy.contains('99-Shared Resources').click(); + // cy.contains(miscDisplayName).click(); // cy.wait(500); // cy.contains('Acceptance Tests Group One').click(); // cy.basicPaginationTest(); diff --git a/spiffworkflow-frontend/cypress/support/commands.js b/spiffworkflow-frontend/cypress/support/commands.js index f0034168c..f7c4e8467 100644 --- a/spiffworkflow-frontend/cypress/support/commands.js +++ b/spiffworkflow-frontend/cypress/support/commands.js @@ -1,5 +1,6 @@ import { string } from 'prop-types'; import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; +import { miscDisplayName } from './helpers'; // *********************************************** // This example commands.js shows you how to @@ -86,15 +87,15 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => { Cypress.Commands.add( 'runPrimaryBpmnFile', (expectAutoRedirectToHumanTask = false) => { - cy.contains('Run').click(); + cy.contains('Start').click(); if (expectAutoRedirectToHumanTask) { // the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress. 
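Just below, the "kicked off" assertions are loosened to accept either capitalization of the notification text. If that copy keeps shifting, the case-insensitive regex flag would be a terser (and slightly broader) equivalent; a sketch of that alternative, not what the diff actually does:

    // Broader, flag-based version of the assertion used below.
    const kickedOff = /Process Instance.*kicked off/i;
    cy.contains(kickedOff);
    cy.reload(true);
    cy.contains(kickedOff).should('not.exist');
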
cy.url().should('include', `/tasks/`); cy.contains('Task: '); } else { - cy.contains(/Process Instance.*kicked off/); + cy.contains(/Process Instance.*[kK]icked [oO]ff/); cy.reload(true); - cy.contains(/Process Instance.*kicked off/).should('not.exist'); + cy.contains(/Process Instance.*[kK]icked [oO]ff/).should('not.exist'); } } ); @@ -103,8 +104,8 @@ Cypress.Commands.add( 'navigateToProcessModel', (groupDisplayName, modelDisplayName, modelIdentifier) => { cy.navigateToAdmin(); - cy.contains('99-Shared Resources').click(); - cy.contains(`Process Group: 99-Shared Resources`, { timeout: 10000 }); + cy.contains(miscDisplayName).click(); + cy.contains(`Process Group: ${miscDisplayName}`, { timeout: 10000 }); cy.contains(groupDisplayName).click(); cy.contains(`Process Group: ${groupDisplayName}`); // https://stackoverflow.com/q/51254946/6090676 diff --git a/spiffworkflow-frontend/cypress/support/helpers.js b/spiffworkflow-frontend/cypress/support/helpers.js new file mode 100644 index 000000000..b3ae449ed --- /dev/null +++ b/spiffworkflow-frontend/cypress/support/helpers.js @@ -0,0 +1 @@ +export const miscDisplayName = 'Shared Resources'; diff --git a/spiffworkflow-frontend/package-lock.json b/spiffworkflow-frontend/package-lock.json index ba2339983..4ccea1922 100644 --- a/spiffworkflow-frontend/package-lock.json +++ b/spiffworkflow-frontend/package-lock.json @@ -68,7 +68,7 @@ "@cypress/grep": "^3.1.0", "@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/parser": "^5.30.6", - "cypress": "^10.8.0", + "cypress": "^12", "eslint": "^8.19.0", "eslint_d": "^12.2.0", "eslint-config-airbnb": "^19.0.4", @@ -9850,9 +9850,9 @@ "integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==" }, "node_modules/cypress": { - "version": "10.11.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz", - "integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz", + "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==", "dev": true, "hasInstallScript": true, "dependencies": { @@ -9903,7 +9903,7 @@ "cypress": "bin/cypress" }, "engines": { - "node": ">=12.0.0" + "node": "^14.0.0 || ^16.0.0 || >=18.0.0" } }, "node_modules/cypress/node_modules/@types/node": { @@ -38586,9 +38586,9 @@ "integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==" }, "cypress": { - "version": "10.11.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz", - "integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz", + "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==", "dev": true, "requires": { "@cypress/request": "^2.88.10", diff --git a/spiffworkflow-frontend/package.json b/spiffworkflow-frontend/package.json index b896bdcec..6a84cea9e 100644 --- a/spiffworkflow-frontend/package.json +++ b/spiffworkflow-frontend/package.json @@ -104,7 +104,7 @@ "@cypress/grep": "^3.1.0", "@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/parser": "^5.30.6", - "cypress": "^10.8.0", + "cypress": "^12", "eslint": "^8.19.0", "eslint_d": "^12.2.0", "eslint-config-airbnb": "^19.0.4", diff 
--git a/spiffworkflow-frontend/src/App.tsx b/spiffworkflow-frontend/src/App.tsx index deb38410d..6357a713f 100644 --- a/spiffworkflow-frontend/src/App.tsx +++ b/spiffworkflow-frontend/src/App.tsx @@ -13,6 +13,7 @@ import AdminRoutes from './routes/AdminRoutes'; import { ErrorForDisplay } from './interfaces'; import { AbilityContext } from './contexts/Can'; +import UserService from './services/UserService'; export default function App() { const [errorMessage, setErrorMessage] = useState( @@ -24,6 +25,11 @@ export default function App() { [errorMessage] ); + if (!UserService.isLoggedIn()) { + UserService.doLogin(); + return null; + } + const ability = defineAbility(() => {}); let errorTag = null; diff --git a/spiffworkflow-frontend/src/components/NavigationBar.tsx b/spiffworkflow-frontend/src/components/NavigationBar.tsx index 47e0de998..7a0ffd3ea 100644 --- a/spiffworkflow-frontend/src/components/NavigationBar.tsx +++ b/spiffworkflow-frontend/src/components/NavigationBar.tsx @@ -24,6 +24,7 @@ import UserService from '../services/UserService'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { PermissionsToCheck } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; +import { UnauthenticatedError } from '../services/HttpService'; // for ref: https://react-bootstrap.github.io/components/navbar/ export default function NavigationBar() { @@ -39,6 +40,11 @@ export default function NavigationBar() { const [activeKey, setActiveKey] = useState(''); const { targetUris } = useUriListForPermissions(); + + // App.jsx forces login (which redirects to keycloak) so we should never get here if we're not logged in. + if (!UserService.isLoggedIn()) { + throw new UnauthenticatedError('You must be authenticated to do this.'); + } const permissionRequestData: PermissionsToCheck = { [targetUris.authenticationListPath]: ['GET'], [targetUris.messageInstanceListPath]: ['GET'], @@ -135,6 +141,9 @@ export default function NavigationBar() { }; const headerMenuItems = () => { + if (!UserService.isLoggedIn()) { + return null; + } return ( <> diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 5e62fcf0c..06f7793c4 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -306,8 +306,13 @@ export default function ProcessInstanceListTable({ checkFiltersAndRun(); if (autoReload) { - refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, checkFiltersAndRun); + return refreshAtInterval( + REFRESH_INTERVAL, + REFRESH_TIMEOUT, + checkFiltersAndRun + ); } + return undefined; }, [ autoReload, searchParams, @@ -845,8 +850,8 @@ export default function ProcessInstanceListTable({ return null; }} shouldFilterItem={shouldFilterReportColumn} - placeholder="Choose a report column" - titleText="Report Column" + placeholder="Choose a column to show" + titleText="Column" /> ); } @@ -895,7 +900,7 @@ export default function ProcessInstanceListTable({ kind="ghost" size="sm" className={`button-tag-icon ${tagTypeClass}`} - title={`Edit ${reportColumnForEditing.accessor}`} + title={`Edit ${reportColumnForEditing.accessor} column`} onClick={() => { setReportColumnToOperateOn(reportColumnForEditing); setShowReportColumnForm(true); @@ -923,7 +928,7 @@ export default function ProcessInstanceListTable({ + + {canViewXml && ( + + )} + ); } diff --git a/spiffworkflow-frontend/src/helpers.tsx 
b/spiffworkflow-frontend/src/helpers.tsx index 6781ada97..8f6255335 100644 --- a/spiffworkflow-frontend/src/helpers.tsx +++ b/spiffworkflow-frontend/src/helpers.tsx @@ -208,5 +208,29 @@ export const refreshAtInterval = ( () => clearInterval(intervalRef), timeout * 1000 ); - return [intervalRef, timeoutRef]; + return () => { + clearInterval(intervalRef); + clearTimeout(timeoutRef); + }; +}; + +const getChildProcesses = (bpmnElement: any) => { + let elements: string[] = []; + bpmnElement.children.forEach((c: any) => { + if (c.type === 'bpmn:Participant') { + if (c.businessObject.processRef) { + elements.push(c.businessObject.processRef.id); + } + elements = [...elements, ...getChildProcesses(c)]; + } else if (c.type === 'bpmn:SubProcess') { + elements.push(c.id); + } + }); + return elements; +}; + +export const getBpmnProcessIdentifiers = (rootBpmnElement: any) => { + const childProcesses = getChildProcesses(rootBpmnElement); + childProcesses.push(rootBpmnElement.businessObject.id); + return childProcesses; }; diff --git a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx index f84465c82..4ba04352b 100644 --- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx +++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx @@ -14,7 +14,8 @@ export const useUriListForPermissions = () => { processInstanceListPath: '/v1.0/process-instances', processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, processInstanceReportListPath: '/v1.0/process-instances/reports', - processInstanceTaskListPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceTaskListDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`, diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index cc3180e55..b0ab6208a 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -12,13 +12,16 @@ export interface RecentProcessModel { } export interface ProcessInstanceTask { - id: number; + id: string; process_model_display_name: string; process_model_identifier: string; task_title: string; lane_assignment_id: string; process_instance_status: number; updated_at_in_seconds: number; + state: string; + process_identifier: string; + name: string; } export interface ProcessReference { @@ -49,6 +52,7 @@ export interface ProcessInstance { id: number; process_model_identifier: string; process_model_display_name: string; + spiff_step?: number; } export interface MessageCorrelationProperties { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx index b6c08b213..1d75db565 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx @@ -21,10 +21,11 @@ export default function ProcessInstanceList() { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 
9a0495d1d..1adb585bf 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -1,6 +1,11 @@ import { useContext, useEffect, useState } from 'react'; import Editor from '@monaco-editor/react'; -import { useParams, useNavigate, Link } from 'react-router-dom'; +import { + useParams, + useNavigate, + Link, + useSearchParams, +} from 'react-router-dom'; import { TrashCan, StopOutline, @@ -34,15 +39,21 @@ import { import ButtonWithConfirmation from '../components/ButtonWithConfirmation'; import ErrorContext from '../contexts/ErrorContext'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; -import { PermissionsToCheck } from '../interfaces'; +import { + PermissionsToCheck, + ProcessInstance, + ProcessInstanceTask, +} from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; export default function ProcessInstanceShow() { const navigate = useNavigate(); const params = useParams(); + const [searchParams] = useSearchParams(); - const [processInstance, setProcessInstance] = useState(null); - const [tasks, setTasks] = useState | null>(null); + const [processInstance, setProcessInstance] = + useState(null); + const [tasks, setTasks] = useState(null); const [tasksCallHadError, setTasksCallHadError] = useState(false); const [taskToDisplay, setTaskToDisplay] = useState(null); const [taskDataToDisplay, setTaskDataToDisplay] = useState(''); @@ -59,8 +70,10 @@ export default function ProcessInstanceShow() { const permissionRequestData: PermissionsToCheck = { [targetUris.messageInstanceListPath]: ['GET'], [targetUris.processInstanceTaskListPath]: ['GET'], + [targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'], [targetUris.processInstanceActionPath]: ['DELETE'], [targetUris.processInstanceLogListPath]: ['GET'], + [targetUris.processModelShowPath]: ['PUT'], [`${targetUris.processInstanceActionPath}/suspend`]: ['PUT'], [`${targetUris.processInstanceActionPath}/terminate`]: ['PUT'], [`${targetUris.processInstanceActionPath}/resume`]: ['PUT'], @@ -80,17 +93,28 @@ export default function ProcessInstanceShow() { const processTaskFailure = () => { setTasksCallHadError(true); }; + let queryParams = ''; + const processIdentifier = searchParams.get('process_identifier'); + if (processIdentifier) { + queryParams = `?process_identifier=${processIdentifier}`; + } HttpService.makeCallToBackend({ - path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}`, + path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, successCallback: setProcessInstance, }); let taskParams = '?all_tasks=true'; if (typeof params.spiff_step !== 'undefined') { taskParams = `${taskParams}&spiff_step=${params.spiff_step}`; } - if (ability.can('GET', targetUris.processInstanceTaskListPath)) { + let taskPath = ''; + if (ability.can('GET', targetUris.processInstanceTaskListDataPath)) { + taskPath = `${targetUris.processInstanceTaskListDataPath}${taskParams}`; + } else if (ability.can('GET', targetUris.processInstanceTaskListPath)) { + taskPath = `${targetUris.processInstanceTaskListPath}${taskParams}`; + } + if (taskPath) { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskListPath}${taskParams}`, + path: taskPath, successCallback: setTasks, failureCallback: processTaskFailure, }); @@ -98,7 +122,14 @@ export default function ProcessInstanceShow() { setTasksCallHadError(true); } } - }, [params, modifiedProcessModelId, 
permissionsLoaded, ability, targetUris]); + }, [ + params, + modifiedProcessModelId, + permissionsLoaded, + ability, + targetUris, + searchParams, + ]); const deleteProcessInstance = () => { HttpService.makeCallToBackend({ @@ -140,12 +171,12 @@ export default function ProcessInstanceShow() { const getTaskIds = () => { const taskIds = { completed: [], readyOrWaiting: [] }; if (tasks) { - tasks.forEach(function getUserTasksElement(task: any) { + tasks.forEach(function getUserTasksElement(task: ProcessInstanceTask) { if (task.state === 'COMPLETED') { - (taskIds.completed as any).push(task.name); + (taskIds.completed as any).push(task); } if (task.state === 'READY' || task.state === 'WAITING') { - (taskIds.readyOrWaiting as any).push(task.name); + (taskIds.readyOrWaiting as any).push(task); } }); } @@ -175,15 +206,18 @@ export default function ProcessInstanceShow() { label: any, distance: number ) => { + const processIdentifier = searchParams.get('process_identifier'); + let queryParams = ''; + if (processIdentifier) { + queryParams = `?process_identifier=${processIdentifier}`; + } return ( {label} @@ -364,10 +398,15 @@ export default function ProcessInstanceShow() { } }; - const handleClickedDiagramTask = (shapeElement: any) => { + const handleClickedDiagramTask = ( + shapeElement: any, + bpmnProcessIdentifiers: any + ) => { if (tasks) { const matchingTask: any = tasks.find( - (task: any) => task.name === shapeElement.id + (task: any) => + task.name === shapeElement.id && + bpmnProcessIdentifiers.includes(task.process_identifier) ); if (matchingTask) { setTaskToDisplay(matchingTask); @@ -411,7 +450,9 @@ export default function ProcessInstanceShow() { const canEditTaskData = (task: any) => { return ( - task.state === 'READY' && showingLastSpiffStep(processInstance as any) + ability.can('PUT', targetUris.processInstanceTaskListDataPath) && + task.state === 'READY' && + showingLastSpiffStep(processInstance as any) ); }; @@ -460,7 +501,10 @@ export default function ProcessInstanceShow() { const taskDataButtons = (task: any) => { const buttons = []; - if (task.type === 'Script Task') { + if ( + task.type === 'Script Task' && + ability.can('PUT', targetUris.processModelShowPath) + ) { buttons.push( ); buttons.push( + ) : null} { setTask(result); - if (ability.can('GET', targetUris.processInstanceTaskListPath)) { + if (ability.can('GET', targetUris.processInstanceTaskListDataPath)) { HttpService.makeCallToBackend({ path: `/task-data/${modifyProcessIdentifierForPathParam( result.process_model_identifier diff --git a/spiffworkflow-frontend/src/services/HttpService.ts b/spiffworkflow-frontend/src/services/HttpService.ts index 119765a7b..78a29d07e 100644 --- a/spiffworkflow-frontend/src/services/HttpService.ts +++ b/spiffworkflow-frontend/src/services/HttpService.ts @@ -26,7 +26,7 @@ type backendCallProps = { postBody?: any; }; -class UnauthenticatedError extends Error { +export class UnauthenticatedError extends Error { constructor(message: string) { super(message); this.name = 'UnauthenticatedError'; diff --git a/spiffworkflow-frontend/src/services/UserService.ts b/spiffworkflow-frontend/src/services/UserService.ts index 84e84d6f4..df0f213e5 100644 --- a/spiffworkflow-frontend/src/services/UserService.ts +++ b/spiffworkflow-frontend/src/services/UserService.ts @@ -27,8 +27,8 @@ const doLogout = () => { const idToken = getIdToken(); localStorage.removeItem('jwtAccessToken'); localStorage.removeItem('jwtIdToken'); - const redirctUrl = `${window.location.origin}/`; - const url = 
`${BACKEND_BASE_URL}/logout?redirect_url=${redirctUrl}&id_token=${idToken}`; + const redirectUrl = `${window.location.origin}`; + const url = `${BACKEND_BASE_URL}/logout?redirect_url=${redirectUrl}&id_token=${idToken}`; window.location.href = url; };
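
Two of the frontend changes above are worth calling out as patterns. First, refreshAtInterval now returns a single cleanup function instead of a ref tuple, so a useEffect can hand teardown straight back to React. A condensed sketch of the helper and its use; the parameter names, REFRESH_* constants, and the checkFiltersAndRun callback are assumed from ProcessInstanceListTable rather than copied verbatim:

    // helpers.tsx-style sketch: run func every intervalSeconds, stop after timeoutSeconds,
    // and return a cleanup suitable as a useEffect return value.
    export const refreshAtInterval = (
      intervalSeconds: number,
      timeoutSeconds: number,
      func: () => void
    ) => {
      const intervalRef = setInterval(() => func(), intervalSeconds * 1000);
      const timeoutRef = setTimeout(
        () => clearInterval(intervalRef),
        timeoutSeconds * 1000
      );
      return () => {
        clearInterval(intervalRef);
        clearTimeout(timeoutRef);
      };
    };

    // In the component (sketch):
    // useEffect(() => {
    //   checkFiltersAndRun();
    //   if (autoReload) {
    //     return refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, checkFiltersAndRun);
    //   }
    //   return undefined;
    // }, [autoReload, searchParams]);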
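
Second, clicking a diagram shape now resolves the task by BPMN process identifier as well as by task name, using the process ids collected from the root element. A self-contained sketch of that matching, with the traversal reproduced from helpers.tsx and only a minimal subset of the ProcessInstanceTask interface above:

    interface ProcessInstanceTask {
      name: string;
      process_identifier: string;
    }

    // Collect process ids from pool participants (recursing into them) and from
    // subprocesses, then include the root process itself.
    const getChildProcesses = (bpmnElement: any): string[] => {
      let elements: string[] = [];
      bpmnElement.children.forEach((child: any) => {
        if (child.type === 'bpmn:Participant') {
          if (child.businessObject.processRef) {
            elements.push(child.businessObject.processRef.id);
          }
          elements = [...elements, ...getChildProcesses(child)];
        } else if (child.type === 'bpmn:SubProcess') {
          elements.push(child.id);
        }
      });
      return elements;
    };

    const getBpmnProcessIdentifiers = (rootBpmnElement: any): string[] => {
      const identifiers = getChildProcesses(rootBpmnElement);
      identifiers.push(rootBpmnElement.businessObject.id);
      return identifiers;
    };

    // The click handler then requires both the task name and the owning process to match:
    const findClickedTask = (
      tasks: ProcessInstanceTask[],
      shapeElement: { id: string },
      bpmnProcessIdentifiers: string[]
    ) =>
      tasks.find(
        (task) =>
          task.name === shapeElement.id &&
          bpmnProcessIdentifiers.includes(task.process_identifier)
      );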