Merge branch 'main' into feature/waku-fault-message
Commit 8aff86a53c
@@ -215,11 +215,16 @@ jobs:
      with:
        # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
        fetch-depth: 0
      - name: Checkout Samples
        uses: actions/checkout@v3
        with:
          repository: sartography/sample-process-models
          path: sample-process-models
      - name: start_backend
        run: ./bin/build_and_run_with_docker_compose
        timeout-minutes: 20
        env:
          SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA: "true"
          SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP: "false"
      - name: wait_for_backend
        run: ./bin/wait_for_server_to_be_up 5
README.md
@@ -1,5 +1,12 @@
# spiff-arena

SpiffArena is a low(ish)-code software development platform for building, running, and monitoring executable diagrams. It is intended to support Citizen Developers and help increase their ability to contribute to the software development process. Using tools that look a lot like flow-charts and spreadsheets, it is possible to design some complex rules in a way that everyone in an organization can see and understand - and that are directly executable.

Please visit the [SpiffWorkflow website](https://www.spiffworkflow.org) for a [Getting Started Guide](https://www.spiffworkflow.org/posts/articles/get_started/) on how to run SpiffArena locally and try it out. There are also
additional articles, videos, and tutorials about SpiffArena and its components - SpiffWorkflow, Service Connectors, and BPMN.js extensions.

# Contributing

This is a monorepo based on git subtrees that pulls together various
spiffworkflow-related projects. Here's an example command to push back to one
project:
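The example command itself falls outside this hunk, so it is not reproduced here. Purely as a hypothetical sketch (the prefix directory and remote URL below are assumptions, not taken from this diff), a subtree push for one sub-project could look like:

```bash
# Hypothetical sketch only: push the history under one subtree prefix back to
# that project's own repository. Prefix and remote are assumptions.
git subtree push --prefix=spiffworkflow-backend \
  git@github.com:sartography/spiffworkflow-backend.git main
```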
@@ -15,3 +22,17 @@ Requires at root:

- .flake8
- .pre-commit-config.yaml
- pyproject.toml

License
-------
SpiffArena's main components are published under the terms of the
`GNU Lesser General Public License (LGPL) Version 3 <https://www.gnu.org/licenses/lgpl-3.0.txt>`_.

Support
-------
You can find us on `our Discord Channel <https://discord.gg/BYHcc7PpUC>`_

Commercial support for SpiffWorkflow is available from
`Sartography <https://sartography.com>`_
@@ -64,7 +64,7 @@ class BpmnWorkflowSerializer:

    # This is the default version set on the workflow, it can be overwritten
    # using the configure_workflow_spec_converter.
    VERSION = "1.0"
    VERSION = "1.1"
    VERSION_KEY = "serializer_version"
    DEFAULT_JSON_ENCODER_CLS = None
    DEFAULT_JSON_DECODER_CLS = None
@@ -1,3 +1,4 @@
from copy import deepcopy
import json

from SpiffWorkflow.bpmn.specs.ServiceTask import ServiceTask
from SpiffWorkflow.spiff.specs.spiff_task import SpiffBpmnTask

@@ -27,7 +28,8 @@ class ServiceTask(SpiffBpmnTask, ServiceTask):
            param['value'] = task.workflow.script_engine.evaluate(task, param['value'])
            return param

        evaluated_params = {k: evaluate(v) for k, v in self.operation_params.items()}
        operation_params_copy = deepcopy(self.operation_params)
        evaluated_params = {k: evaluate(v) for k, v in operation_params_copy.items()}

        result = task.workflow.script_engine.call_service(self.operation_name,
                                                          evaluated_params, task.data)
@@ -18,6 +18,7 @@ What is SpiffWorkflow?
======================
.. image:: images/logo.png
   :align: center
   :target: https://www.spiffworkflow.org

SpiffWorkflow allows your python application to process BPMN diagrams (think
of them as very powerful flow charts, see :doc:`intro`) to accomplish

@@ -28,6 +29,11 @@ code. You can use these diagrams to accomplish a number of tasks, such as:
- Implement an approval process that requires input from multiple users
- Allow non-programmers to modify the flow and behavior of your application.

Please visit `SpiffWorkflow.org <https://www.spiffworkflow.org>`_ for
additional articles, videos, and tutorials about SpiffWorkflow and its
related projects including SpiffArena, Service Connectors, and BPMN.js
extensions.

License
-------
Spiff Workflow is published under the terms of the
@@ -60,6 +60,15 @@ class ServiceTaskTest(BaseTestCase):
        self.workflow.do_engine_steps()
        self._assert_service_tasks()

    def testRunSameServiceTaskActivityMultipleTimes(self):
        self.workflow.do_engine_steps()
        service_task_activity = [t for t in self.workflow.get_tasks() if
                                 t.task_spec.name == 'Activity-1inxqgx'][0]

        service_task_activity.task_spec._execute(service_task_activity)
        service_task_activity.task_spec._execute(service_task_activity)
        service_task_activity.task_spec._execute(service_task_activity)

    def testRunThroughSaveRestore(self):
        self.save_restore()
        # Engine isn't preserved through save/restore, so we have to reset it.
@@ -7,10 +7,8 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models"
fi
BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
export BPMN_SPEC_ABSOLUTE_DIR

if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run
@@ -0,0 +1,30 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"

  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
      exit 1
    fi
  fi

  pushd "$BPMN_SPEC_ABSOLUTE_DIR" >/dev/null 2>&1
  if [[ "$(git rev-parse --abbrev-ref HEAD)" == "main" ]]; then
    >&2 echo "ERROR: please do not use the main branch of sample-process-models. use dev"
    exit 1
  fi
  popd >/dev/null 2>&1
fi

realpath "$BPMN_SPEC_ABSOLUTE_DIR"
@@ -17,19 +17,8 @@ if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_ENV=development
fi

if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"

  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
      exit 1
    fi
  fi
  export BPMN_SPEC_ABSOLUTE_DIR
fi
BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
export BPMN_SPEC_ABSOLUTE_DIR

export FLASK_SESSION_SECRET_KEY=super_secret_key
export APPLICATION_ROOT="/"
@@ -9,6 +9,7 @@ def main() -> None:
    """Main."""
    app = create_app()
    with app.app_context():
        print("HEY")
        failing_process_models = DataSetupService.save_all_process_models()
        for bpmn_errors in failing_process_models:
            print(bpmn_errors)
@@ -654,7 +654,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"
resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1"

[[package]]
name = "Flask-Cors"

@@ -1851,7 +1851,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"
resolved_reference = "5eed83ab12f67c01c7836424a22fc425a33fc55d"

[[package]]
name = "SQLAlchemy"

@@ -2222,7 +2222,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata]
lock-version = "1.1"
python-versions = ">=3.9,<3.12"
content-hash = "bbbd1c8bdce7f3dd7ec17c62b85dc7c95045fe500a759bb1a89c93add58a2a25"
content-hash = "832c1b6cd8d9aebc8529fdce11167bddcb3634fd0767dd2e490b74ababcf2714"

[metadata.files]
alabaster = [
@@ -2563,7 +2563,6 @@ greenlet = [
    {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
    {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
    {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
    {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
    {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
    {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
    {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},

@@ -2572,7 +2571,6 @@ greenlet = [
    {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
    {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
    {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
    {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
    {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
    {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
    {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},

@@ -2581,7 +2579,6 @@ greenlet = [
    {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
    {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
    {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
    {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
    {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
    {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
    {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},

@@ -2880,7 +2877,10 @@ orjson = [
    {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
    {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
    {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
    {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
    {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
    {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
    {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
    {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
    {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
    {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},

@@ -2989,18 +2989,7 @@ psycopg2 = [
    {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
]
pyasn1 = [
    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
    {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
    {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [
@@ -79,7 +79,7 @@ pytest = "*"
coverage = {extras = ["toml"], version = "^6.1"}
safety = "^2.3.1"
mypy = ">=0.961"
typeguard = "^2.13.2"
typeguard = "^2"
xdoctest = {extras = ["colors"], version = "^1.0.1"}
sphinx = "^5.0.2"
sphinx-autobuild = ">=2021.3.14"
@@ -628,6 +628,12 @@ paths:
          description: The identifier of the group to get the process instances for
          schema:
            type: string
        - name: process_initiator_username
          in: query
          required: false
          description: The username of the process initiator
          schema:
            type: string
    get:
      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list_for_me
      summary: Returns a list of process instances that are associated with me.

@@ -741,6 +747,12 @@ paths:
          description: The identifier of the group to get the process instances for
          schema:
            type: string
        - name: process_initiator_username
          in: query
          required: false
          description: The username of the process initiator
          schema:
            type: string
    get:
      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list
      summary: Returns a list of process instances.

@@ -946,6 +958,27 @@ paths:
            schema:
              $ref: "#/components/schemas/Workflow"

  /process-instances/find-by-id/{process_instance_id}:
    parameters:
      - name: process_instance_id
        in: path
        required: true
        description: The unique id of an existing process instance.
        schema:
          type: integer
    get:
      operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_find_by_id
      summary: Find a process instance based on its id only
      tags:
        - Process Instances
      responses:
        "200":
          description: One Process Instance
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/Workflow"

  /process-instances/{modified_process_model_identifier}/{process_instance_id}:
    parameters:
      - name: modified_process_model_identifier

@@ -1787,7 +1820,7 @@ paths:
    post:
      tags:
        - Messages
      operationId: spiffworkflow_backend.routes.messages_controller.message_start
      operationId: spiffworkflow_backend.routes.messages_controller.message_send
      summary: Instantiate and run a given process model with a message start event matching given identifier
      requestBody:
        content:
@@ -30,6 +30,12 @@ permissions:
    allowed_permissions: [read]
    uri: /*

  process-instances-find-by-id:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/find-by-id/*

  tasks-crud:
    groups: [everybody]
    users: []
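Taken together, the new OpenAPI path and the `process-instances-find-by-id` permission above let any logged-in user look up an instance by id alone. As a rough, hedged sketch of how a client might exercise it (host, port, and the bearer token are assumptions; the `/v1.0` prefix matches the test calls later in this diff):

```bash
# Hypothetical call to the new find-by-id endpoint; base URL and token are assumptions.
curl -H "Authorization: Bearer $ACCESS_TOKEN" \
  "http://localhost:7000/v1.0/process-instances/find-by-id/42"
# Per the controller change later in this diff, the response carries the process
# instance plus a "uri_type" field ("for-me" when the caller lacks full read access).
```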
@@ -54,10 +54,21 @@ class ProcessModelInfo:
        return False

    # for use with os.path.join so it can work on windows
    # NOTE: in APIs, ids should always have forward slashes, even in windows.
    # this is because we have to store ids in the database, and we want the same
    # database snapshot to work on any OS.
    def id_for_file_path(self) -> str:
        """Id_for_file_path."""
        return self.id.replace("/", os.sep)

    @classmethod
    def modify_process_identifier_for_path_param(cls, identifier: str) -> str:
        """Identifier."""
        if "\\" in identifier:
            raise Exception(f"Found backslash in identifier: {identifier}")

        return identifier.replace("/", ":")


class ProcessModelInfoSchema(Schema):
    """ProcessModelInfoSchema."""
@@ -19,6 +19,7 @@ from spiffworkflow_backend.models.message_triggerable_process_model import (
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.routes.process_api_blueprint import (
    _find_process_instance_by_id_or_raise,
)

@@ -90,7 +91,7 @@ def message_instance_list(
#   payload: dict,
#   process_instance_id: Optional[int],
# }
def message_start(
def message_send(
    message_identifier: str,
    body: Dict[str, Any],
) -> flask.wrappers.Response:

@@ -121,6 +122,26 @@ def message_start(
            body["process_instance_id"]
        )

        if process_instance.status == ProcessInstanceStatus.suspended.value:
            raise ApiError(
                error_code="process_instance_is_suspended",
                message=(
                    f"Process Instance '{process_instance.id}' is suspended and cannot"
                    " accept messages.'"
                ),
                status_code=400,
            )

        if process_instance.status == ProcessInstanceStatus.terminated.value:
            raise ApiError(
                error_code="process_instance_is_terminated",
                message=(
                    f"Process Instance '{process_instance.id}' is terminated and cannot"
                    " accept messages.'"
                ),
                status_code=400,
            )

        message_instance = MessageInstanceModel.query.filter_by(
            process_instance_id=process_instance.id,
            message_model_id=message_model.id,
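The renamed `message_send` endpoint now refuses messages aimed at suspended or terminated instances. A hedged sketch of the HTTP interaction, mirroring the test-client calls later in this diff (host, port, token, and the message identifier value are assumptions):

```bash
# Hypothetical message send; mirrors the Flask test client calls in this commit.
# Base URL and token are assumptions.
curl -X POST "http://localhost:7000/v1.0/messages/message_response" \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  -d '{"payload": {"the_payload": {"topica": "a"}}, "process_instance_id": 42}'
# A suspended instance now answers 400 with error_code "process_instance_is_suspended";
# a terminated one answers 400 with "process_instance_is_terminated".
```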
@@ -31,6 +31,7 @@ from spiffworkflow_backend.models.process_instance_metadata import (
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel

@@ -43,6 +44,7 @@ from spiffworkflow_backend.routes.process_api_blueprint import _get_process_mode
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService

@@ -88,9 +90,7 @@ def process_instance_run(
    do_engine_steps: bool = True,
) -> flask.wrappers.Response:
    """Process_instance_run."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    if process_instance.status != "not_started":
        raise ApiError(
            error_code="process_instance_not_runnable",

@@ -138,9 +138,7 @@ def process_instance_terminate(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_run."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    processor = ProcessInstanceProcessor(process_instance)
    processor.terminate()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

@@ -151,9 +149,7 @@ def process_instance_suspend(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_suspend."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    processor = ProcessInstanceProcessor(process_instance)
    processor.suspend()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

@@ -164,9 +160,7 @@ def process_instance_resume(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_resume."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    processor = ProcessInstanceProcessor(process_instance)
    processor.resume()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

@@ -187,7 +181,12 @@ def process_instance_log_list(
        SpiffLoggingModel.process_instance_id == process_instance.id
    )
    if not detailed:
        log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"]))  # type: ignore
        log_query = log_query.filter(
            or_(
                SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
                SpiffLoggingModel.message.like("Skipped task %"),  # type: ignore
            )
        )

    logs = (
        log_query.order_by(SpiffLoggingModel.timestamp.desc())  # type: ignore

@@ -225,6 +224,7 @@ def process_instance_list_for_me(
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
    process_initiator_username: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_list_for_me."""
    return process_instance_list(

@@ -258,6 +258,7 @@ def process_instance_list(
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
    process_initiator_username: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_list."""
    process_instance_report = ProcessInstanceReportService.report_with_identifier(

@@ -274,6 +275,7 @@ def process_instance_list(
            end_to=end_to,
            with_relation_to_me=with_relation_to_me,
            process_status=process_status.split(",") if process_status else None,
            process_initiator_username=process_initiator_username,
        )
    else:
        report_filter = (

@@ -287,6 +289,7 @@ def process_instance_list(
                end_to=end_to,
                process_status=process_status,
                with_relation_to_me=with_relation_to_me,
                process_initiator_username=process_initiator_username,
            )
        )

@@ -554,6 +557,10 @@ def process_instance_task_list(
        spiff_tasks = processor.get_all_user_tasks()

    subprocesses_by_child_task_ids = processor.get_subprocesses_by_child_task_ids()
    processor.get_highest_level_subprocesses_by_child_task_ids(
        subprocesses_by_child_task_ids
    )

    tasks = []
    for spiff_task in spiff_tasks:
        calling_subprocess_task_id = subprocesses_by_child_task_ids.get(

@@ -575,14 +582,43 @@ def process_instance_reset(
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Reset a process instance to a particular step."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    processor = ProcessInstanceProcessor(process_instance)
    processor.reset_process(spiff_step)
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_find_by_id(
    process_instance_id: int,
) -> flask.wrappers.Response:
    """Process_instance_find_by_id."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    modified_process_model_identifier = (
        ProcessModelInfo.modify_process_identifier_for_path_param(
            process_instance.process_model_identifier
        )
    )
    process_instance_uri = (
        f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
    )
    has_permission = AuthorizationService.user_has_permission(
        user=g.user,
        permission="read",
        target_uri=process_instance_uri,
    )

    uri_type = None
    if not has_permission:
        process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
        uri_type = "for-me"

    response_json = {
        "process_instance": process_instance,
        "uri_type": uri_type,
    }
    return make_response(jsonify(response_json), 200)


def _get_process_instance(
    modified_process_model_identifier: str,
    process_instance: ProcessInstanceModel,
@@ -31,6 +31,9 @@ from spiffworkflow_backend.routes.process_api_blueprint import (
)
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.git_service import MissingGitConfigsError
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportNotFoundError,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportService,
)

@@ -39,7 +42,7 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService


def process_model_create(
    modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
    modified_process_group_id: str, body: Dict[str, Union[str, bool, int, None, list]]
) -> flask.wrappers.Response:
    """Process_model_create."""
    body_include_list = [

@@ -92,7 +95,8 @@ def process_model_delete(


def process_model_update(
    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
    modified_process_model_identifier: str,
    body: Dict[str, Union[str, bool, int, None, list]],
) -> Any:
    """Process_model_update."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")

@@ -441,6 +445,10 @@ def process_model_create_with_natural_language(
    default_report_metadata = ProcessInstanceReportService.system_metadata_map(
        "default"
    )
    if default_report_metadata is None:
        raise ProcessInstanceReportNotFoundError(
            "Could not find a report with identifier 'default'"
        )
    for column in columns:
        default_report_metadata["columns"].append(
            {"Header": column, "accessor": column, "filterable": True}
@@ -1,7 +1,6 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Dict
from typing import Optional

from flask import g
from flask import jsonify

@@ -15,9 +14,10 @@ from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.user_service import UserService


def secret_show(key: str) -> Optional[str]:
def secret_show(key: str) -> Response:
    """Secret_show."""
    return SecretService.get_secret(key)
    secret = SecretService.get_secret(key)
    return make_response(jsonify(secret), 200)


def secret_list(
@@ -0,0 +1,25 @@
"""Get_secret."""
from typing import Any

from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.secret_service import SecretService


class GetSecret(Script):
    """GetSecret."""

    def get_description(self) -> str:
        """Get_description."""
        return """Returns the value for a previously configured secret."""

    def run(
        self,
        script_attributes_context: ScriptAttributesContext,
        *args: Any,
        **kwargs: Any
    ) -> Any:
        """Run."""
        return SecretService.get_secret(args[0]).value
@@ -624,6 +624,84 @@ class AuthorizationService:

        return permissions_to_assign

    @classmethod
    def set_basic_permissions(cls) -> list[PermissionToAssign]:
        """Set_basic_permissions."""
        permissions_to_assign: list[PermissionToAssign] = []
        permissions_to_assign.append(
            PermissionToAssign(
                permission="read", target_uri="/process-instances/for-me"
            )
        )
        permissions_to_assign.append(
            PermissionToAssign(permission="read", target_uri="/processes")
        )
        permissions_to_assign.append(
            PermissionToAssign(permission="read", target_uri="/service-tasks")
        )
        permissions_to_assign.append(
            PermissionToAssign(
                permission="read", target_uri="/user-groups/for-current-user"
            )
        )
        permissions_to_assign.append(
            PermissionToAssign(
                permission="read", target_uri="/process-instances/find-by-id/*"
            )
        )

        for permission in ["create", "read", "update", "delete"]:
            permissions_to_assign.append(
                PermissionToAssign(
                    permission=permission, target_uri="/process-instances/reports/*"
                )
            )
            permissions_to_assign.append(
                PermissionToAssign(permission=permission, target_uri="/tasks/*")
            )
        return permissions_to_assign

    @classmethod
    def set_process_group_permissions(
        cls, target: str, permission_set: str
    ) -> list[PermissionToAssign]:
        """Set_process_group_permissions."""
        permissions_to_assign: list[PermissionToAssign] = []
        process_group_identifier = (
            target.removeprefix("PG:").replace("/", ":").removeprefix(":")
        )
        process_related_path_segment = f"{process_group_identifier}:*"
        if process_group_identifier == "ALL":
            process_related_path_segment = "*"
        target_uris = [
            f"/process-groups/{process_related_path_segment}",
            f"/process-models/{process_related_path_segment}",
        ]
        permissions_to_assign = permissions_to_assign + cls.get_permissions_to_assign(
            permission_set, process_related_path_segment, target_uris
        )
        return permissions_to_assign

    @classmethod
    def set_process_model_permissions(
        cls, target: str, permission_set: str
    ) -> list[PermissionToAssign]:
        """Set_process_model_permissions."""
        permissions_to_assign: list[PermissionToAssign] = []
        process_model_identifier = (
            target.removeprefix("PM:").replace("/", ":").removeprefix(":")
        )
        process_related_path_segment = f"{process_model_identifier}/*"

        if process_model_identifier == "ALL":
            process_related_path_segment = "*"

        target_uris = [f"/process-models/{process_related_path_segment}"]
        permissions_to_assign = permissions_to_assign + cls.get_permissions_to_assign(
            permission_set, process_related_path_segment, target_uris
        )
        return permissions_to_assign

    @classmethod
    def explode_permissions(
        cls, permission_set: str, target: str

@@ -654,72 +732,20 @@ class AuthorizationService:
        permissions = ["create", "read", "update", "delete"]

        if target.startswith("PG:"):
            process_group_identifier = (
                target.removeprefix("PG:").replace("/", ":").removeprefix(":")
            permissions_to_assign += cls.set_process_group_permissions(
                target, permission_set
            )
            process_related_path_segment = f"{process_group_identifier}:*"
            if process_group_identifier == "ALL":
                process_related_path_segment = "*"
            target_uris = [
                f"/process-groups/{process_related_path_segment}",
                f"/process-models/{process_related_path_segment}",
            ]
            permissions_to_assign = (
                permissions_to_assign
                + cls.get_permissions_to_assign(
                    permission_set, process_related_path_segment, target_uris
                )
            )

        elif target.startswith("PM:"):
            process_model_identifier = (
                target.removeprefix("PM:").replace("/", ":").removeprefix(":")
            permissions_to_assign += cls.set_process_model_permissions(
                target, permission_set
            )
            process_related_path_segment = f"{process_model_identifier}/*"

            if process_model_identifier == "ALL":
                process_related_path_segment = "*"

            target_uris = [f"/process-models/{process_related_path_segment}"]
            permissions_to_assign = (
                permissions_to_assign
                + cls.get_permissions_to_assign(
                    permission_set, process_related_path_segment, target_uris
                )
            )

        elif permission_set == "start":
            raise InvalidPermissionError(
                "Permission 'start' is only available for macros PM and PG."
            )

        elif target.startswith("BASIC"):
            permissions_to_assign.append(
                PermissionToAssign(
                    permission="read", target_uri="/process-instances/for-me"
                )
            )
            permissions_to_assign.append(
                PermissionToAssign(permission="read", target_uri="/processes")
            )
            permissions_to_assign.append(
                PermissionToAssign(permission="read", target_uri="/service-tasks")
            )
            permissions_to_assign.append(
                PermissionToAssign(
                    permission="read", target_uri="/user-groups/for-current-user"
                )
            )

            for permission in ["create", "read", "update", "delete"]:
                permissions_to_assign.append(
                    PermissionToAssign(
                        permission=permission, target_uri="/process-instances/reports/*"
                    )
                )
                permissions_to_assign.append(
                    PermissionToAssign(permission=permission, target_uri="/tasks/*")
                )
            permissions_to_assign += cls.set_basic_permissions()
        elif target == "ALL":
            for permission in permissions:
                permissions_to_assign.append(
@@ -50,8 +50,6 @@ class DataSetupService:
                )
            )

        current_app.logger.debug(
            "DataSetupService.save_all_process_models() end"
        )
        current_app.logger.debug("DataSetupService.save_all_process_models() end")
        db.session.commit()
        return failing_process_models
@@ -646,6 +646,23 @@ class ProcessInstanceProcessor:
            subprocesses_by_child_task_ids[task_id] = subprocess_id
        return subprocesses_by_child_task_ids

    def get_highest_level_subprocesses_by_child_task_ids(
        self, subprocesses_by_child_task_ids: dict
    ) -> dict:
        """Ensure task ids point to the top level subprocess id.

        This is done by checking if a subprocess is also a task until the subprocess is no longer a task.
        """
        for task_id, subprocess_id in subprocesses_by_child_task_ids.items():
            if subprocess_id in subprocesses_by_child_task_ids:
                subprocesses_by_child_task_ids[task_id] = (
                    subprocesses_by_child_task_ids[subprocess_id]
                )
                self.get_highest_level_subprocesses_by_child_task_ids(
                    subprocesses_by_child_task_ids
                )
        return subprocesses_by_child_task_ids

    def save(self) -> None:
        """Saves the current state of this processor to the database."""
        self.process_instance_model.bpmn_json = self.serialize()
@@ -3,10 +3,11 @@ import re
from dataclasses import dataclass
from typing import Any
from typing import Optional
from typing import Type

import sqlalchemy
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import and_
from sqlalchemy import func
from sqlalchemy import or_

@@ -28,6 +29,10 @@ from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignme
from spiffworkflow_backend.services.process_model_service import ProcessModelService


class ProcessInstanceReportNotFoundError(Exception):
    """ProcessInstanceReportNotFoundError."""


@dataclass
class ProcessInstanceReportFilter:
    """ProcessInstanceReportFilter."""

@@ -44,6 +49,7 @@ class ProcessInstanceReportFilter:
    with_tasks_completed_by_me: Optional[bool] = None
    with_tasks_assigned_to_my_group: Optional[bool] = None
    with_relation_to_me: Optional[bool] = None
    process_initiator_username: Optional[str] = None

    def to_dict(self) -> dict[str, str]:
        """To_dict."""

@@ -77,6 +83,8 @@ class ProcessInstanceReportFilter:
            ).lower()
        if self.with_relation_to_me is not None:
            d["with_relation_to_me"] = str(self.with_relation_to_me).lower()
        if self.process_initiator_username is not None:
            d["process_initiator_username"] = str(self.process_initiator_username)

        return d

@@ -85,7 +93,7 @@ class ProcessInstanceReportService:
    """ProcessInstanceReportService."""

    @classmethod
    def system_metadata_map(cls, metadata_key: str) -> dict[str, Any]:
    def system_metadata_map(cls, metadata_key: str) -> Optional[dict[str, Any]]:
        """System_metadata_map."""
        # TODO replace with system reports that are loaded on launch (or similar)
        temp_system_metadata_map = {

@@ -106,16 +114,16 @@ class ProcessInstanceReportService:
                    {"Header": "status", "accessor": "status"},
                ],
                "filter_by": [
                    {"field_name": "initiated_by_me", "field_value": True},
                    {"field_name": "has_terminal_status", "field_value": True},
                    {"field_name": "initiated_by_me", "field_value": "true"},
                    {"field_name": "has_terminal_status", "field_value": "true"},
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },
            "system_report_completed_instances_with_tasks_completed_by_me": {
                "columns": cls.builtin_column_options(),
                "filter_by": [
                    {"field_name": "with_tasks_completed_by_me", "field_value": True},
                    {"field_name": "has_terminal_status", "field_value": True},
                    {"field_name": "with_tasks_completed_by_me", "field_value": "true"},
                    {"field_name": "has_terminal_status", "field_value": "true"},
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },

@@ -124,13 +132,16 @@ class ProcessInstanceReportService:
                "filter_by": [
                    {
                        "field_name": "with_tasks_assigned_to_my_group",
                        "field_value": True,
                        "field_value": "true",
                    },
                    {"field_name": "has_terminal_status", "field_value": True},
                    {"field_name": "has_terminal_status", "field_value": "true"},
                ],
                "order_by": ["-start_in_seconds", "-id"],
            },
        }

        if metadata_key not in temp_system_metadata_map:
            return None
        return temp_system_metadata_map[metadata_key]

    @classmethod

@@ -157,10 +168,17 @@ class ProcessInstanceReportService:
        if process_instance_report is not None:
            return process_instance_report  # type: ignore

        report_metadata = cls.system_metadata_map(report_identifier)
        if report_metadata is None:
            raise ProcessInstanceReportNotFoundError(
                f"Could not find a report with identifier '{report_identifier}' for"
                f" user '{user.username}'"
            )

        process_instance_report = ProcessInstanceReportModel(
            identifier=report_identifier,
            created_by_id=user.id,
            report_metadata=cls.system_metadata_map(report_identifier),
            report_metadata=report_metadata,
        )

        return process_instance_report  # type: ignore

@@ -210,20 +228,22 @@ class ProcessInstanceReportService:
        with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me")
        with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group")
        with_relation_to_me = bool_value("with_relation_to_me")
        process_initiator_username = filters.get("process_initiator_username")

        report_filter = ProcessInstanceReportFilter(
            process_model_identifier,
            user_group_identifier,
            start_from,
            start_to,
            end_from,
            end_to,
            process_status,
            initiated_by_me,
            has_terminal_status,
            with_tasks_completed_by_me,
            with_tasks_assigned_to_my_group,
            with_relation_to_me,
            process_model_identifier=process_model_identifier,
            user_group_identifier=user_group_identifier,
            start_from=start_from,
            start_to=start_to,
            end_from=end_from,
            end_to=end_to,
            process_status=process_status,
            initiated_by_me=initiated_by_me,
            has_terminal_status=has_terminal_status,
            with_tasks_completed_by_me=with_tasks_completed_by_me,
            with_tasks_assigned_to_my_group=with_tasks_assigned_to_my_group,
            with_relation_to_me=with_relation_to_me,
            process_initiator_username=process_initiator_username,
        )

        return report_filter

@@ -244,6 +264,7 @@ class ProcessInstanceReportService:
        with_tasks_completed_by_me: Optional[bool] = None,
        with_tasks_assigned_to_my_group: Optional[bool] = None,
        with_relation_to_me: Optional[bool] = None,
        process_initiator_username: Optional[str] = None,
    ) -> ProcessInstanceReportFilter:
        """Filter_from_metadata_with_overrides."""
        report_filter = cls.filter_from_metadata(process_instance_report)

@@ -268,6 +289,8 @@ class ProcessInstanceReportService:
            report_filter.has_terminal_status = has_terminal_status
        if with_tasks_completed_by_me is not None:
            report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me
        if process_initiator_username is not None:
            report_filter.process_initiator_username = process_initiator_username
        if with_tasks_assigned_to_my_group is not None:
            report_filter.with_tasks_assigned_to_my_group = (
                with_tasks_assigned_to_my_group

@@ -297,7 +320,9 @@ class ProcessInstanceReportService:
        return results

    @classmethod
    def get_column_names_for_model(cls, model: db.Model) -> list[str]:  # type: ignore
    def get_column_names_for_model(
        cls, model: Type[SpiffworkflowBaseDBModel]
    ) -> list[str]:
        """Get_column_names_for_model."""
        return [i.name for i in model.__table__.columns]

@@ -386,6 +411,17 @@ class ProcessInstanceReportService:
                ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses())  # type: ignore
            )

        if report_filter.process_initiator_username is not None:
            user = UserModel.query.filter_by(
                username=report_filter.process_initiator_username
            ).first()
            process_initiator_id = -1
            if user:
                process_initiator_id = user.id
            process_instance_query = process_instance_query.filter_by(
                process_initiator_id=process_initiator_id
            )

        if (
            not report_filter.with_tasks_completed_by_me
            and not report_filter.with_tasks_assigned_to_my_group
@@ -39,6 +39,11 @@ class ProcessModelService(FileSystemService):
    GROUP_SCHEMA = ProcessGroupSchema()
    PROCESS_MODEL_SCHEMA = ProcessModelInfoSchema()

    @classmethod
    def path_to_id(cls, path: str) -> str:
        """Replace the os path separator for the standard id separator."""
        return path.replace(os.sep, "/")

    @classmethod
    def is_group(cls, path: str) -> bool:
        """Is_group."""

@@ -228,7 +233,12 @@ class ProcessModelService(FileSystemService):
        user = UserService.current_user()
        new_process_model_list = []
        for process_model in process_models:
            uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}"
            modified_process_model_id = (
                ProcessModelInfo.modify_process_identifier_for_path_param(
                    process_model.id
                )
            )
            uri = f"/v1.0/process-instances/{modified_process_model_id}"
            has_permission = AuthorizationService.user_has_permission(
                user=user, permission="create", target_uri=uri
            )

@@ -410,7 +420,7 @@ class ProcessModelService(FileSystemService):
                data = json.load(cat_json)
                # we don't store `id` in the json files, so we add it back in here
                relative_path = os.path.relpath(dir_path, FileSystemService.root_path())
                data["id"] = relative_path
                data["id"] = cls.path_to_id(relative_path)
                process_group = ProcessGroup(**data)
                if process_group is None:
                    raise ApiError(

@@ -421,7 +431,9 @@ class ProcessModelService(FileSystemService):
                    ),
                )
        else:
            process_group_id = dir_path.replace(FileSystemService.root_path(), "")
            process_group_id = cls.path_to_id(
                dir_path.replace(FileSystemService.root_path(), "")
            )
            process_group = ProcessGroup(
                id="",
                display_name=process_group_id,

@@ -474,11 +486,7 @@ class ProcessModelService(FileSystemService):
                data.pop("process_group_id")
            # we don't save `id` in the json file, so we add it back in here.
            relative_path = os.path.relpath(path, FileSystemService.root_path())

            # even on windows, use forward slashes for ids
            relative_path = relative_path.replace("\\", "/")

            data["id"] = relative_path
            data["id"] = cls.path_to_id(relative_path)
            process_model_info = ProcessModelInfo(**data)
            if process_model_info is None:
                raise ApiError(
@@ -354,11 +354,8 @@ class BaseTest:
        assert has_permission is expected_result

    def modify_process_identifier_for_path_param(self, identifier: str) -> str:
        """Identifier."""
        if "\\" in identifier:
            raise Exception(f"Found backslash in identifier: {identifier}")

        return identifier.replace("/", ":")
        """Modify_process_identifier_for_path_param."""
        return ProcessModelInfo.modify_process_identifier_for_path_param(identifier)

    def un_modify_modified_process_identifier_for_path_param(
        self, modified_identifier: str
@ -1296,16 +1296,16 @@ class TestProcessApi(BaseTest):
|
|||
xml_file_contents = f_open.read()
|
||||
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
|
||||
|
||||
def test_message_start_when_starting_process_instance(
|
||||
def test_message_send_when_starting_process_instance(
|
||||
self,
|
||||
app: Flask,
|
||||
client: FlaskClient,
|
||||
with_db_and_bpmn_file_cleanup: None,
|
||||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_message_start_when_starting_process_instance."""
|
||||
"""Test_message_send_when_starting_process_instance."""
|
||||
# ensure process model is loaded
|
||||
process_group_id = "test_message_start"
|
||||
process_group_id = "test_message_send"
|
||||
process_model_id = "message_receiver"
|
||||
bpmn_file_name = "message_receiver.bpmn"
|
||||
bpmn_file_location = "message_send_one_conversation"
|
||||
|
@ -1345,15 +1345,15 @@ class TestProcessApi(BaseTest):
|
|||
assert process_instance_data
|
||||
assert process_instance_data["the_payload"] == payload
|
||||
|
||||
def test_message_start_when_providing_message_to_running_process_instance(
|
||||
def test_message_send_when_providing_message_to_running_process_instance(
|
||||
self,
|
||||
app: Flask,
|
||||
client: FlaskClient,
|
||||
with_db_and_bpmn_file_cleanup: None,
|
||||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_message_start_when_providing_message_to_running_process_instance."""
|
||||
process_group_id = "test_message_start"
|
||||
"""Test_message_send_when_providing_message_to_running_process_instance."""
|
||||
process_group_id = "test_message_send"
|
||||
process_model_id = "message_sender"
|
||||
bpmn_file_name = "message_sender.bpmn"
|
||||
bpmn_file_location = "message_send_one_conversation"
|
||||
|
@ -1412,6 +1412,105 @@ class TestProcessApi(BaseTest):
|
|||
assert process_instance_data
|
||||
assert process_instance_data["the_payload"] == payload
|
||||
|
||||
def test_message_send_errors_when_providing_message_to_suspended_process_instance(
|
||||
self,
|
||||
app: Flask,
|
||||
client: FlaskClient,
|
||||
with_db_and_bpmn_file_cleanup: None,
|
||||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_message_send_when_providing_message_to_running_process_instance."""
|
||||
process_group_id = "test_message_send"
|
||||
process_model_id = "message_sender"
|
||||
bpmn_file_name = "message_sender.bpmn"
|
||||
bpmn_file_location = "message_send_one_conversation"
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client,
|
||||
with_super_admin_user,
|
||||
process_group_id=process_group_id,
|
||||
process_model_id=process_model_id,
|
||||
bpmn_file_name=bpmn_file_name,
|
||||
bpmn_file_location=bpmn_file_location,
|
||||
)
|
||||
|
||||
message_model_identifier = "message_response"
|
||||
payload = {
|
||||
"the_payload": {
|
||||
"topica": "the_payload.topica_string",
|
||||
"topicb": "the_payload.topicb_string",
|
||||
"andThis": "another_item_non_key",
|
||||
}
|
||||
}
|
||||
response = self.create_process_instance_from_process_model_id_with_api(
|
||||
client,
|
||||
process_model_identifier,
|
||||
self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
assert response.json is not None
|
||||
process_instance_id = response.json["id"]
|
||||
|
||||
response = client.post(
|
||||
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json is not None
|
||||
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
|
||||
processor.suspend()
|
||||
response = client.post(
|
||||
f"/v1.0/messages/{message_model_identifier}",
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
data=json.dumps(
|
||||
{"payload": payload, "process_instance_id": process_instance_id}
|
||||
),
|
||||
)
|
||||
assert response.status_code == 400
|
||||
assert response.json
|
||||
assert response.json["error_code"] == "process_instance_is_suspended"
|
||||
|
||||
processor.resume()
|
||||
response = client.post(
|
||||
f"/v1.0/messages/{message_model_identifier}",
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
data=json.dumps(
|
||||
{"payload": payload, "process_instance_id": process_instance_id}
|
||||
),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
json_data = response.json
|
||||
assert json_data
|
||||
assert json_data["status"] == "complete"
|
||||
process_instance_id = json_data["id"]
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
assert process_instance
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
process_instance_data = processor.get_data()
|
||||
assert process_instance_data
|
||||
assert process_instance_data["the_payload"] == payload
|
||||
|
||||
processor.terminate()
|
||||
response = client.post(
|
||||
f"/v1.0/messages/{message_model_identifier}",
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
data=json.dumps(
|
||||
{"payload": payload, "process_instance_id": process_instance_id}
|
||||
),
|
||||
)
|
||||
assert response.status_code == 400
|
||||
assert response.json
|
||||
assert response.json["error_code"] == "process_instance_is_terminated"
|
||||
|
||||
def test_process_instance_can_be_terminated(
|
||||
self,
|
||||
app: Flask,
|
||||
|
@ -1419,9 +1518,9 @@ class TestProcessApi(BaseTest):
|
|||
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_message_start_when_providing_message_to_running_process_instance."""
        """Test_message_send_when_providing_message_to_running_process_instance."""
        # this task will wait on a catch event
        process_group_id = "test_message_start"
        process_group_id = "test_message_send"
        process_model_id = "message_sender"
        bpmn_file_name = "message_sender.bpmn"
        bpmn_file_location = "message_send_one_conversation"

@@ -2188,7 +2287,7 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_can_get_message_instances_by_process_instance_id."""
        process_group_id = "test_message_start"
        process_group_id = "test_message_send"
        process_model_id = "message_receiver"
        bpmn_file_name = "message_receiver.bpmn"
        bpmn_file_location = "message_send_one_conversation"

@@ -3046,6 +3145,95 @@ class TestProcessApi(BaseTest):
assert response.json["pagination"]["pages"] == 1
|
||||
assert response.json["pagination"]["total"] == 1
|
||||
|
||||
def test_can_get_process_instance_list_with_report_metadata_and_process_initator(
|
||||
self,
|
||||
app: Flask,
|
||||
client: FlaskClient,
|
||||
with_db_and_bpmn_file_cleanup: None,
|
||||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_can_get_process_instance_list_with_report_metadata_and_process_initator."""
|
||||
user_one = self.create_user_with_permission(username="user_one")
|
||||
|
||||
process_model = load_test_spec(
|
||||
process_model_id=(
|
||||
"save_process_instance_metadata/save_process_instance_metadata"
|
||||
),
|
||||
bpmn_file_name="save_process_instance_metadata.bpmn",
|
||||
process_model_source_directory="save_process_instance_metadata",
|
||||
)
|
||||
self.create_process_instance_from_process_model(
|
||||
process_model=process_model, user=user_one
|
||||
)
|
||||
self.create_process_instance_from_process_model(
|
||||
process_model=process_model, user=user_one
|
||||
)
|
||||
self.create_process_instance_from_process_model(
|
||||
process_model=process_model, user=with_super_admin_user
|
||||
)
|
||||
|
||||
dne_report_metadata = {
|
||||
"columns": [
|
||||
{"Header": "ID", "accessor": "id"},
|
||||
{"Header": "Status", "accessor": "status"},
|
||||
{"Header": "Process Initiator", "accessor": "username"},
|
||||
],
|
||||
"order_by": ["status"],
|
||||
"filter_by": [
|
||||
{
|
||||
"field_name": "process_initiator_username",
|
||||
"field_value": "DNE",
|
||||
"operator": "equals",
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
user_one_report_metadata = {
|
||||
"columns": [
|
||||
{"Header": "ID", "accessor": "id"},
|
||||
{"Header": "Status", "accessor": "status"},
|
||||
{"Header": "Process Initiator", "accessor": "username"},
|
||||
],
|
||||
"order_by": ["status"],
|
||||
"filter_by": [
|
||||
{
|
||||
"field_name": "process_initiator_username",
|
||||
"field_value": user_one.username,
|
||||
"operator": "equals",
|
||||
}
|
||||
],
|
||||
}
|
||||
process_instance_report_dne = ProcessInstanceReportModel.create_with_attributes(
|
||||
identifier="dne_report",
|
||||
report_metadata=dne_report_metadata,
|
||||
user=user_one,
|
||||
)
|
||||
process_instance_report_user_one = (
|
||||
ProcessInstanceReportModel.create_with_attributes(
|
||||
identifier="user_one_report",
|
||||
report_metadata=user_one_report_metadata,
|
||||
user=user_one,
|
||||
)
|
||||
)
|
||||
|
||||
response = client.get(
|
||||
f"/v1.0/process-instances?report_identifier={process_instance_report_user_one.identifier}",
|
||||
headers=self.logged_in_headers(user_one),
|
||||
)
|
||||
assert response.json is not None
|
||||
assert response.status_code == 200
|
||||
assert len(response.json["results"]) == 2
|
||||
assert response.json["results"][0]["username"] == user_one.username
|
||||
assert response.json["results"][1]["username"] == user_one.username
|
||||
|
||||
response = client.get(
|
||||
f"/v1.0/process-instances?report_identifier={process_instance_report_dne.identifier}",
|
||||
headers=self.logged_in_headers(user_one),
|
||||
)
|
||||
assert response.json is not None
|
||||
assert response.status_code == 200
|
||||
assert len(response.json["results"]) == 0
|
||||
|
||||
def test_can_get_process_instance_report_column_list(
|
||||
self,
|
||||
app: Flask,
|
||||
|
|
|
@ -0,0 +1,61 @@
|
|||
"""Test_users_controller."""
|
||||
from flask.app import Flask
|
||||
from flask.testing import FlaskClient
|
||||
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
|
||||
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
|
||||
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
|
||||
|
||||
class TestProcessInstancesController(BaseTest):
|
||||
"""TestProcessInstancesController."""
|
||||
|
||||
def test_find_by_id(
|
||||
self,
|
||||
app: Flask,
|
||||
client: FlaskClient,
|
||||
with_db_and_bpmn_file_cleanup: None,
|
||||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_user_search_returns_a_user."""
|
||||
user_one = self.create_user_with_permission(
|
||||
username="user_one", target_uri="/process-instances/find-by-id/*"
|
||||
)
|
||||
user_two = self.create_user_with_permission(
|
||||
username="user_two", target_uri="/process-instances/find-by-id/*"
|
||||
)
|
||||
|
||||
process_model = load_test_spec(
|
||||
process_model_id="group/sample",
|
||||
bpmn_file_name="sample.bpmn",
|
||||
process_model_source_directory="sample",
|
||||
)
|
||||
process_instance = self.create_process_instance_from_process_model(
|
||||
process_model=process_model, user=user_one
|
||||
)
|
||||
|
||||
response = client.get(
|
||||
f"/v1.0/process-instances/find-by-id/{process_instance.id}",
|
||||
headers=self.logged_in_headers(user_one),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json
|
||||
assert "process_instance" in response.json
|
||||
assert response.json["process_instance"]["id"] == process_instance.id
|
||||
assert response.json["uri_type"] == "for-me"
|
||||
|
||||
response = client.get(
|
||||
f"/v1.0/process-instances/find-by-id/{process_instance.id}",
|
||||
headers=self.logged_in_headers(user_two),
|
||||
)
|
||||
assert response.status_code == 400
|
||||
|
||||
response = client.get(
|
||||
f"/v1.0/process-instances/find-by-id/{process_instance.id}",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json
|
||||
assert "process_instance" in response.json
|
||||
assert response.json["process_instance"]["id"] == process_instance.id
|
||||
assert response.json["uri_type"] is None
|
|
@ -277,6 +277,7 @@ class TestAuthorizationService(BaseTest):
|
|||
    ) -> None:
        """Test_explode_permissions_basic."""
        expected_permissions = [
            ("/process-instances/find-by-id/*", "read"),
            ("/process-instances/for-me", "read"),
            ("/process-instances/reports/*", "create"),
            ("/process-instances/reports/*", "delete"),

@@ -24,12 +24,9 @@ const deleteVideosOnSuccess = (on) => {
  })
}

module.exports = defineConfig({
const cypressConfig = {
  projectId: 'crax1q',

  // since it's slow
  videoCompression: useVideoCompression,

  videoUploadOnPasses: false,
  chromeWebSecurity: false,
  e2e: {

@@ -45,4 +42,11 @@ module.exports = defineConfig({
  // https://github.com/cypress-io/cypress/issues/2353
  // https://docs.cypress.io/guides/core-concepts/interacting-with-elements#Scrolling
  scrollBehavior: "center",
});
}

if (!process.env.CYPRESS_RECORD_KEY) {
  // since it's slow
  cypressConfig.videoCompression = false
}

module.exports = defineConfig(cypressConfig)

@@ -12,6 +12,7 @@ import {
  ProcessModel,
  ReportColumn,
  ReportMetadata,
  User,
} from '../interfaces';
import HttpService from '../services/HttpService';

@@ -20,6 +21,7 @@ type OwnProps = {
  columnArray: ReportColumn[];
  orderBy: string;
  processModelSelection: ProcessModel | null;
  processInitiatorSelection: User | null;
  processStatusSelection: string[];
  startFromSeconds: string | null;
  startToSeconds: string | null;

@@ -36,6 +38,7 @@ export default function ProcessInstanceListSaveAsReport({
  columnArray,
  orderBy,
  processModelSelection,
  processInitiatorSelection,
  processInstanceReportSelection,
  processStatusSelection,
  startFromSeconds,

@@ -86,6 +89,13 @@ export default function ProcessInstanceListSaveAsReport({
      });
    }

    if (processInitiatorSelection) {
      filterByArray.push({
        field_name: 'process_initiator_username',
        field_value: processInitiatorSelection.username,
      });
    }

    if (processStatusSelection.length > 0) {
      filterByArray.push({
        field_name: 'process_status',

@@ -193,11 +193,42 @@ export default function ProcessInstanceListTable({
    setEndToTime,
  ]);

  const handleProcessInstanceInitiatorSearchResult = (
    result: any,
    inputText: string
  ) => {
    if (lastRequestedInitatorSearchTerm.current === result.username_prefix) {
      setProcessInstanceInitiatorOptions(result.users);
      result.users.forEach((user: User) => {
        if (user.username === inputText) {
          setProcessInitiatorSelection(user);
        }
      });
    }
  };

  const searchForProcessInitiator = (inputText: string) => {
    if (inputText) {
      lastRequestedInitatorSearchTerm.current = inputText;
      HttpService.makeCallToBackend({
        path: `/users/search?username_prefix=${inputText}`,
        successCallback: (result: any) =>
          handleProcessInstanceInitiatorSearchResult(result, inputText),
      });
    }
  };

  const parametersToGetFromSearchParams = useMemo(() => {
    const figureOutProcessInitiator = (processInitiatorSearchText: string) => {
      searchForProcessInitiator(processInitiatorSearchText);
    };

    return {
      process_model_identifier: null,
      process_status: null,
      process_initiator_username: figureOutProcessInitiator,
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  // eslint-disable-next-line sonarjs/cognitive-complexity

@@ -384,6 +415,12 @@ export default function ProcessInstanceListTable({
      }
    });

    if (filters.process_initiator_username) {
      const functionToCall =
        parametersToGetFromSearchParams.process_initiator_username;
      functionToCall(filters.process_initiator_username);
    }

    const processStatusSelectedArray: string[] = [];
    if (filters.process_status) {
      PROCESS_STATUSES.forEach((processStatusOption: any) => {

@@ -538,8 +575,13 @@ export default function ProcessInstanceListTable({
      queryParamString += `&report_id=${processInstanceReportSelection.id}`;
    }

    if (processInitiatorSelection) {
      queryParamString += `&process_initiator_username=${processInitiatorSelection.username}`;
    }

    setErrorObject(null);
    setProcessInstanceReportJustSaved(null);
    setProcessInstanceFilters({});
    navigate(`${processInstanceListPathPrefix}?${queryParamString}`);
  };

@@ -682,6 +724,7 @@ export default function ProcessInstanceListTable({
        orderBy=""
        buttonText="Save"
        processModelSelection={processModelSelection}
        processInitiatorSelection={processInitiatorSelection}
        processStatusSelection={processStatusSelection}
        processInstanceReportSelection={processInstanceReportSelection}
        reportMetadata={reportMetadata}

@@ -987,22 +1030,6 @@ export default function ProcessInstanceListTable({
    return null;
  };

  const handleProcessInstanceInitiatorSearchResult = (result: any) => {
    if (lastRequestedInitatorSearchTerm.current === result.username_prefix) {
      setProcessInstanceInitiatorOptions(result.users);
    }
  };

  const searchForProcessInitiator = (inputText: string) => {
    if (inputText) {
      lastRequestedInitatorSearchTerm.current = inputText;
      HttpService.makeCallToBackend({
        path: `/users/search?username_prefix=${inputText}`,
        successCallback: handleProcessInstanceInitiatorSearchResult,
      });
    }
  };

  const filterOptions = () => {
    if (!showFilterOptions) {
      return null;
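The initiator typeahead above guards against out-of-order responses: it records the most recently requested prefix in a ref and drops any result whose `username_prefix` does not match. A minimal standalone sketch of that "latest request wins" pattern follows; the names (`UserSearchResult`, `searchUsers`, `fetchUsers`) are illustrative assumptions, not part of the SpiffWorkflow frontend API.

// Minimal sketch of the "latest request wins" guard, assuming a hypothetical
// fetchUsers() that echoes back the prefix it was asked for.
interface UserSearchResult {
  username_prefix: string;
  users: { username: string }[];
}

const lastRequestedPrefix = { current: '' };

const handleSearchResult = (result: UserSearchResult) => {
  // Drop responses that are not for the most recently typed prefix, so a
  // slow, older request cannot overwrite newer results.
  if (lastRequestedPrefix.current !== result.username_prefix) {
    return;
  }
  console.log('usable results:', result.users);
};

const searchUsers = (
  prefix: string,
  fetchUsers: (p: string) => Promise<UserSearchResult>
) => {
  lastRequestedPrefix.current = prefix;
  fetchUsers(prefix).then(handleSearchResult);
};

In the component itself the ref comes from React's useRef and the request goes through HttpService.makeCallToBackend, but the guard logic is the same.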

@@ -264,9 +264,6 @@ export default function ReactDiagramEditor({
      handleLaunchMarkdownEditor(element, value, eventBus);
    });

    /**
     * fixme: this is not in use yet, we need the ability to find bpmn files by id.
     */
    diagramModeler.on('spiff.callactivity.edit', (event: any) => {
      if (onLaunchBpmnEditor) {
        onLaunchBpmnEditor(event.processId);

@@ -1,5 +1,6 @@
import {
  convertSecondsToFormattedDateString,
  isInteger,
  slugifyString,
  underscorizeString,
} from './helpers';

@@ -20,3 +21,11 @@ test('it can keep the correct date when converting seconds to date', () => {
  const dateString = convertSecondsToFormattedDateString(1666325400);
  expect(dateString).toEqual('2022-10-21');
});

test('it can validate numeric values', () => {
  expect(isInteger('11')).toEqual(true);
  expect(isInteger('hey')).toEqual(false);
  expect(isInteger(' ')).toEqual(false);
  expect(isInteger('1 2')).toEqual(false);
  expect(isInteger(2)).toEqual(true);
});

@@ -219,6 +219,20 @@ export const refreshAtInterval = (
  };
};

// bpmn:SubProcess shape elements do not have children
// but their moddle elements / businessOjects have flowElements
// that can include the moddleElement of the subprocesses
const getChildProcessesFromModdleElement = (bpmnModdleElement: any) => {
  let elements: string[] = [];
  bpmnModdleElement.flowElements.forEach((c: any) => {
    if (c.$type === 'bpmn:SubProcess') {
      elements.push(c.id);
      elements = [...elements, ...getChildProcessesFromModdleElement(c)];
    }
  });
  return elements;
};

const getChildProcesses = (bpmnElement: any) => {
  let elements: string[] = [];
  bpmnElement.children.forEach((c: any) => {

@@ -229,6 +243,10 @@ const getChildProcesses = (bpmnElement: any) => {
      elements = [...elements, ...getChildProcesses(c)];
    } else if (c.type === 'bpmn:SubProcess') {
      elements.push(c.id);
      elements = [
        ...elements,
        ...getChildProcessesFromModdleElement(c.businessObject),
      ];
    }
  });
  return elements;

@@ -253,3 +271,7 @@ export const setErrorMessageSafely = (
  errorMessageSetter({ message: newErrorMessageString });
  return null;
};

export const isInteger = (str: string | number) => {
  return /^\d+$/.test(str.toString());
};
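The two helpers above recurse differently: getChildProcesses walks the rendered shape tree, while getChildProcessesFromModdleElement falls back to the moddle businessObject's flowElements, because bpmn:SubProcess shapes do not expose their nested children directly. Here is a hedged, standalone sketch of that flowElements recursion over a simplified element shape; the MaybeSubProcess interface is an assumption for illustration, not the bpmn-js/moddle type.

// Standalone sketch of the recursion above over a simplified element shape.
interface MaybeSubProcess {
  id: string;
  $type?: string; // moddle elements expose their BPMN type via $type
  flowElements?: MaybeSubProcess[];
}

const collectSubProcessIds = (element: MaybeSubProcess): string[] => {
  let ids: string[] = [];
  (element.flowElements || []).forEach((child) => {
    if (child.$type === 'bpmn:SubProcess') {
      ids.push(child.id);
      // recurse so subprocesses nested inside subprocesses are found too
      ids = [...ids, ...collectSubProcessIds(child)];
    }
  });
  return ids;
};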

@@ -23,6 +23,7 @@ import MessageInstanceList from './MessageInstanceList';
import Configuration from './Configuration';
import JsonSchemaFormBuilder from './JsonSchemaFormBuilder';
import ProcessModelNewExperimental from './ProcessModelNewExperimental';
import ProcessInstanceFindById from './ProcessInstanceFindById';

export default function AdminRoutes() {
  const location = useLocation();

@@ -133,6 +134,10 @@ export default function AdminRoutes() {
        path="process-models/:process_model_id/form-builder"
        element={<JsonSchemaFormBuilder />}
      />
      <Route
        path="process-instances/find-by-id"
        element={<ProcessInstanceFindById />}
      />
    </Routes>
  );
}

@@ -0,0 +1,79 @@
import { useEffect, useState } from 'react';
import { useNavigate } from 'react-router-dom';
// @ts-ignore
import { Button, ButtonSet, Form, Stack, TextInput } from '@carbon/react';
import { isInteger, modifyProcessIdentifierForPathParam } from '../helpers';
import HttpService from '../services/HttpService';
import { ProcessInstance } from '../interfaces';

export default function ProcessInstanceFindById() {
  const navigate = useNavigate();
  const [processInstanceId, setProcessInstanceId] = useState<string>('');
  const [processInstanceIdValid, setProcessInstanceIdValid] =
    useState<boolean>(true);

  useEffect(() => {}, []);

  const handleProcessInstanceNavigation = (result: any) => {
    const processInstance: ProcessInstance = result.process_instance;
    let path = '/admin/process-instances/';
    if (result.uri_type === 'for-me') {
      path += 'for-me/';
    }
    path += `${modifyProcessIdentifierForPathParam(
      processInstance.process_model_identifier
    )}/${processInstance.id}`;
    navigate(path);
  };

  const handleFormSubmission = (event: any) => {
    event.preventDefault();

    if (!processInstanceId) {
      setProcessInstanceIdValid(false);
    }

    if (processInstanceId && processInstanceIdValid) {
      HttpService.makeCallToBackend({
        path: `/process-instances/find-by-id/${processInstanceId}`,
        successCallback: handleProcessInstanceNavigation,
      });
    }
  };

  const handleProcessInstanceIdChange = (event: any) => {
    if (isInteger(event.target.value)) {
      setProcessInstanceIdValid(true);
    } else {
      setProcessInstanceIdValid(false);
    }
    setProcessInstanceId(event.target.value);
  };

  const formElements = () => {
    return (
      <TextInput
        id="process-instance-id-input"
        invalidText="Process Instance Id must be a number."
        invalid={!processInstanceIdValid}
        labelText="Process Instance Id*"
        value={processInstanceId}
        onChange={handleProcessInstanceIdChange}
      />
    );
  };

  const formButtons = () => {
    const buttons = [<Button type="submit">Submit</Button>];
    return <ButtonSet>{buttons}</ButtonSet>;
  };

  return (
    <Form onSubmit={handleFormSubmission}>
      <Stack gap={5}>
        {formElements()}
        {formButtons()}
      </Stack>
    </Form>
  );
}

@@ -85,6 +85,15 @@ export default function ProcessInstanceList({ variant }: OwnProps) {
            All
          </Tab>
        </Can>
        <Tab
          title="Search for a process instance by id."
          data-qa="process-instance-list-find-by-id"
          onClick={() => {
            navigate('/admin/process-instances/find-by-id');
          }}
        >
          Find By Id
        </Tab>
      </TabList>
    </Tabs>
    <br />

@@ -205,7 +205,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
        !callingSubprocessId ||
        callingSubprocessId === task.calling_subprocess_task_id
      ) {
        console.log('callingSubprocessId', callingSubprocessId);
        if (task.state === 'COMPLETED') {
          (taskIds.completed as any).push(task);
        }

@@ -127,7 +127,7 @@ export default function ProcessModelEditDiagram() {
      path: `/processes`,
      successCallback: processResults,
    });
  }, [processModel]);
  }, []);

  useEffect(() => {
    const processResult = (result: ProcessModel) => {

@@ -731,7 +731,7 @@ export default function ProcessModelEditDiagram() {
    );
  };
  const onLaunchMarkdownEditor = (
    element: any,
    _element: any,
    markdown: string,
    eventBus: any
  ) => {

@@ -767,7 +767,7 @@ export default function ProcessModelEditDiagram() {
  };

  const onSearchProcessModels = (
    processId: string,
    _processId: string,
    eventBus: any,
    element: any
  ) => {

@@ -792,6 +792,7 @@ export default function ProcessModelEditDiagram() {
        open={showProcessSearch}
        modalHeading="Select Process Model"
        primaryButtonText="Close"
        onRequestClose={processSearchOnClose}
        onRequestSubmit={processSearchOnClose}
        size="lg"
      >

@@ -826,22 +827,30 @@ export default function ProcessModelEditDiagram() {
  };

  const onLaunchBpmnEditor = (processId: string) => {
    const processRef = processes.find((p) => {
      return p.identifier === processId;
    // using the "setState" method with a function gives us access to the
    // most current state of processes. Otherwise it uses the stale state
    // when passing the callback to a non-React component like bpmn-js:
    // https://stackoverflow.com/a/60643670/6090676
    setProcesses((upToDateProcesses: ProcessReference[]) => {
      const processRef = upToDateProcesses.find((p) => {
        return p.identifier === processId;
      });
      if (processRef) {
        const path = generatePath(
          '/admin/process-models/:process_model_path/files/:file_name',
          {
            process_model_path: modifyProcessIdentifierForPathParam(
              processRef.process_model_id
            ),
            file_name: processRef.file_name,
          }
        );
        window.open(path);
      }
      return upToDateProcesses;
    });
    if (processRef) {
      const path = generatePath(
        '/admin/process-models/:process_model_path/files/:file_name',
        {
          process_model_path: modifyProcessIdentifierForPathParam(
            processRef.process_model_id
          ),
          file_name: processRef.file_name,
        }
      );
      window.open(path);
    }
  };

  const onLaunchJsonEditor = (fileName: string) => {
    const path = generatePath(
      '/admin/process-models/:process_model_id/form/:file_name',

@@ -957,6 +966,7 @@ export default function ProcessModelEditDiagram() {
      {scriptEditorAndTests()}
      {markdownEditor()}
      {processModelSelector()}
      {`Processes length: ${processes.length}`}
      <div id="diagram-container" />
    </>
  );
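The onLaunchBpmnEditor change above relies on React's functional setState form so that a callback handed to a non-React consumer (bpmn-js) reads the current processes list instead of a stale closure. Below is a minimal sketch of that pattern under the assumption of a hypothetical useLatestItems hook; it is not code from this repository, only the setState-with-a-function shape matters.

// Minimal sketch of the functional-update pattern used above.
import { useState } from 'react';

export function useLatestItems() {
  const [items, setItems] = useState<string[]>([]);

  // Safe to hand to a non-React caller: it reads the freshest state via the
  // updater argument instead of capturing `items` in a stale closure.
  const logCurrentItems = () => {
    setItems((upToDateItems) => {
      console.log('current items:', upToDateItems);
      return upToDateItems; // return the same reference so no extra re-render
    });
  };

  return { items, setItems, logCurrentItems };
}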

@@ -100,7 +100,7 @@ export default function ProcessModelShow() {
        onClose={() => setProcessInstance(null)}
      >
        <Link
          to={`/admin/process-instances/${modifiedProcessModelId}/${processInstance.id}`}
          to={`/admin/process-instances/for-me/${modifiedProcessModelId}/${processInstance.id}`}
          data-qa="process-instance-show-link"
        >
          view

@@ -685,7 +685,7 @@ export default function ProcessModelShow() {
          perPageOptions={[2, 5, 25]}
          showReports={false}
        />
        <span data-qa="process-model-show-permissions-loaded">true</span>
        <span data-qa="process-model-show-permissions-loaded" />
      </Can>
    </>
  );

@@ -10,9 +10,13 @@ import { BACKEND_BASE_URL } from '../config';
// Some explanation:
// https://dev.to/nilanth/how-to-secure-jwt-in-a-single-page-application-cko

// const getCurrentLocation = (queryParams: string = window.location.search) => {
const getCurrentLocation = () => {
  // to trim off any query params
  return `${window.location.origin}${window.location.pathname}`;
  const queryParamString = '';
  // if (queryParams) {
  //   queryParamString = `?${queryParams}`;
  // }
  return `${window.location.origin}${window.location.pathname}${queryParamString}`;
};

const doLogin = () => {

@@ -60,18 +64,20 @@ const getPreferredUsername = () => {
// FIXME: we could probably change this search to a hook
// and then could use useSearchParams here instead
const getAuthTokenFromParams = () => {
  const queryParams = window.location.search;
  const accessTokenMatch = queryParams.match(/.*\baccess_token=([^&]+).*/);
  const idTokenMatch = queryParams.match(/.*\bid_token=([^&]+).*/);
  if (accessTokenMatch) {
    const accessToken = accessTokenMatch[1];
  const queryParams = new URLSearchParams(window.location.search);
  const accessToken = queryParams.get('access_token');
  const idToken = queryParams.get('id_token');

  queryParams.delete('access_token');
  queryParams.delete('id_token');

  if (accessToken) {
    localStorage.setItem('jwtAccessToken', accessToken);
    if (idTokenMatch) {
      const idToken = idTokenMatch[1];
    if (idToken) {
      localStorage.setItem('jwtIdToken', idToken);
    }
    // to remove token query param
    window.location.href = getCurrentLocation();
    // window.location.href = `${getCurrentLocation(queryParams.toString())}`;
    window.location.href = `${getCurrentLocation()}`;
  } else if (!isLoggedIn()) {
    doLogin();
  }
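The login-service change above swaps regex matching for URLSearchParams when pulling access_token and id_token off the callback URL, then deletes them before redirecting. A hedged sketch of that parse-store-strip flow follows, run against a hardcoded example URL and a plain object rather than window.location and localStorage, so it is not the app's real login flow.

// Sketch of the parse-and-strip flow using URLSearchParams.
const exampleUrl = new URL('https://example.test/app?access_token=abc&id_token=xyz&tab=2');
const params = new URLSearchParams(exampleUrl.search);

const accessToken = params.get('access_token');
const idToken = params.get('id_token');

// remove the tokens so they do not stay in the visible URL or browser history
params.delete('access_token');
params.delete('id_token');

const storage: Record<string, string> = {};
if (accessToken) {
  storage.jwtAccessToken = accessToken;
  if (idToken) {
    storage.jwtIdToken = idToken;
  }
}

const cleanedUrl = `${exampleUrl.origin}${exampleUrl.pathname}?${params.toString()}`;
console.log(cleanedUrl); // https://example.test/app?tab=2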