From 761023c6214f09c48b0d323ad452cb8480afcce9 Mon Sep 17 00:00:00 2001
From: Dan
Date: Thu, 26 Jan 2023 18:26:17 -0500
Subject: [PATCH 01/59] Workflow Data Exceptions were not getting processed; we now catch the WorkflowDataException through the generic top-level SpiffWorkflowException.
---
 spiffworkflow-backend/pyproject.toml | 4 ++--
 .../spiffworkflow_backend/exceptions/api_error.py | 12 +++++++-----
 .../services/process_instance_processor.py | 8 ++++----
 3 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml
index 9f8a29443..da45e6848 100644
--- a/spiffworkflow-backend/pyproject.toml
+++ b/spiffworkflow-backend/pyproject.toml
@@ -29,8 +29,8 @@ flask-restful = "*"
 werkzeug = "*"
 # temporarily switch off main to fix CI because poetry export doesn't capture the revision if it's not here (it ignores the lock)
 # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "450ef3bcd639b6bc1c115fbe35bf3f93946cb0c7"}
-# SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
+# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "450ef3bcd639b6bc1c115fbe35bf3f93946cb0c7"}
+SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
 sentry-sdk = "^1.10"
 sphinx-autoapi = "^2.0"
 flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py
index 02a66a207..58821d0c2 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py
@@ -8,6 +8,7 @@ from typing import Any
 import flask.wrappers
 import sentry_sdk
+from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
 from flask import Blueprint
 from flask import current_app
 from flask import g
@@ -15,7 +16,7 @@ from flask import jsonify
 from flask import make_response
 from sentry_sdk import capture_exception
 from sentry_sdk import set_tag
-from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
+from SpiffWorkflow.exceptions import WorkflowException, SpiffWorkflowException  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.specs.base import TaskSpec  # type: ignore
 from SpiffWorkflow.task import Task  # type: ignore
@@ -131,7 +132,7 @@ class ApiError(Exception):
         cls,
         error_code: str,
         message: str,
-        exp: WorkflowException,
+        exp: SpiffWorkflowException,
     ) -> ApiError:
         """Deals with workflow exceptions.

        we can with the data we have. 
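An illustrative editor's aside, not part of this patch: the fix relies on SpiffWorkflow's exception hierarchy. WorkflowDataException is a WorkflowTaskException (per the comment added in the next hunk), which is a WorkflowException, and all of these derive from the top-level SpiffWorkflowException. A minimal sketch of the dispatch this enables, mirroring the handler this patch changes in process_instance_processor.py:

    try:
        processor.do_engine_steps(save=True)
    except SpiffWorkflowException as swe:  # also catches WorkflowDataException
        raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe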
""" if isinstance(exp, WorkflowTaskException): + # Note that WorkflowDataExceptions are also WorkflowTaskExceptions return ApiError.from_task( error_code, message, @@ -150,10 +152,10 @@ class ApiError(Exception): error_line=exp.error_line, task_trace=exp.task_trace, ) - - else: + elif isinstance(exp, WorkflowException): return ApiError.from_task_spec(error_code, message, exp.task_spec) - + else: + return ApiError("workflow_error", str(exp)) def set_user_sentry_context() -> None: """Set_user_sentry_context.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 9063f2780..469227b8d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -19,6 +19,7 @@ from typing import TypedDict from typing import Union from uuid import UUID +import SpiffWorkflow import dateparser import pytz from flask import current_app @@ -37,7 +38,7 @@ from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ig from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore -from SpiffWorkflow.exceptions import WorkflowException # type: ignore +from SpiffWorkflow.exceptions import WorkflowException, SpiffWorkflowException # type: ignore from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter # type: ignore @@ -1411,9 +1412,8 @@ class ProcessInstanceProcessor: if hasattr(handler, "bulk_insert_logs"): handler.bulk_insert_logs() # type: ignore db.session.commit() - - except WorkflowTaskException as we: - raise ApiError.from_workflow_exception("task_error", str(we), we) from we + except SpiffWorkflowException as swe: + raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe finally: if save: From a68c11b6f5d98b9eb94d94a97b86e4fce30eff82 Mon Sep 17 00:00:00 2001 From: Dan Date: Thu, 26 Jan 2023 18:39:51 -0500 Subject: [PATCH 02/59] run_pyl --- spiffworkflow-backend/pyproject.toml | 4 ++-- .../src/spiffworkflow_backend/exceptions/api_error.py | 5 +++-- .../services/process_instance_processor.py | 4 ++-- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index da45e6848..89edd680a 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -28,9 +28,9 @@ flask-migrate = "*" flask-restful = "*" werkzeug = "*" # temporarily switch off main to fix CI because poetry export doesn't capture the revision if it's not here (it ignores the lock) -# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} +SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "450ef3bcd639b6bc1c115fbe35bf3f93946cb0c7"} -SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" } +# SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" } sentry-sdk = "^1.10" sphinx-autoapi = "^2.0" flask-bpmn = {git = 
"https://github.com/sartography/flask-bpmn", rev = "main"} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index 58821d0c2..5574fe912 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -8,7 +8,6 @@ from typing import Any import flask.wrappers import sentry_sdk -from SpiffWorkflow.bpmn.exceptions import WorkflowDataException from flask import Blueprint from flask import current_app from flask import g @@ -16,7 +15,8 @@ from flask import jsonify from flask import make_response from sentry_sdk import capture_exception from sentry_sdk import set_tag -from SpiffWorkflow.exceptions import WorkflowException, SpiffWorkflowException # type: ignore +from SpiffWorkflow.exceptions import SpiffWorkflowException # type: ignore +from SpiffWorkflow.exceptions import WorkflowException from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.specs.base import TaskSpec # type: ignore from SpiffWorkflow.task import Task # type: ignore @@ -157,6 +157,7 @@ class ApiError(Exception): else: return ApiError("workflow_error", str(exp)) + def set_user_sentry_context() -> None: """Set_user_sentry_context.""" try: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 469227b8d..c63380c95 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -19,7 +19,6 @@ from typing import TypedDict from typing import Union from uuid import UUID -import SpiffWorkflow import dateparser import pytz from flask import current_app @@ -38,7 +37,8 @@ from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ig from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore -from SpiffWorkflow.exceptions import WorkflowException, SpiffWorkflowException # type: ignore +from SpiffWorkflow.exceptions import SpiffWorkflowException # type: ignore +from SpiffWorkflow.exceptions import WorkflowException from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter # type: ignore From fbf3dd788f4bc7aeb29702c61232a33ebd420c1f Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 26 Jan 2023 21:59:02 -0500 Subject: [PATCH 03/59] update lock file in backend and arena, though that one needs pruning --- poetry.lock | 2 +- spiffworkflow-backend/poetry.lock | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3608303e0..eddb1917b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1760,7 +1760,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "1f51db962ccaed5810f5d0f7d76a932f056430ab" +resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331" [[package]] name = "sqlalchemy" diff --git a/spiffworkflow-backend/poetry.lock 
b/spiffworkflow-backend/poetry.lock index b22d24664..6456f9572 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1824,8 +1824,8 @@ lxml = "*" [package.source] type = "git" url = "https://github.com/sartography/SpiffWorkflow" -reference = "450ef3bcd639b6bc1c115fbe35bf3f93946cb0c7" -resolved_reference = "450ef3bcd639b6bc1c115fbe35bf3f93946cb0c7" +reference = "main" +resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331" [[package]] name = "SQLAlchemy" @@ -2204,7 +2204,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "95c08ed2de5b5d047474666c9e9a5ff3e7e94e6184649c2aa6d3a961711f14b0" +content-hash = "b16e8fb0cf991bcba08c3ef1ddf205f5899c622a10c79a7f50fb55a36d53b179" [metadata.files] alabaster = [ @@ -2546,6 +2546,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2554,6 +2555,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2562,6 +2564,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, @@ -2863,10 +2866,7 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, - {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, From 4157a35f1ab90d97343da146db02e7b88aa351a2 Mon Sep 17 00:00:00 2001 From: Dan Date: Mon, 30 Jan 2023 13:09:23 -0500 Subject: [PATCH 04/59] Use the id_token, not the auth_token from the open id server for authentication with the front end. The auth_token should be kept safe, and not guranteeded to be a json token. --- .../src/spiffworkflow_backend/routes/user.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 976f7883c..50c6f88a2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -96,7 +96,7 @@ def verify_token( ) if auth_token and "error" not in auth_token: tld = current_app.config["THREAD_LOCAL_DATA"] - tld.new_access_token = auth_token["access_token"] + tld.new_access_token = auth_token["id_token"] tld.new_id_token = auth_token["id_token"] # We have the user, but this code is a bit convoluted, and will later demand # a user_info object so it can look up the user. Sorry to leave this crap here. 
@@ -186,6 +186,7 @@ def set_new_access_token_in_cookie( ): domain_for_frontend_cookie = None + # fixme - we should not be passing the access token back to the client if hasattr(tld, "new_access_token") and tld.new_access_token: response.set_cookie( "access_token", tld.new_access_token, domain=domain_for_frontend_cookie @@ -254,7 +255,7 @@ def parse_id_token(token: str) -> Any: return json.loads(decoded) -def login_return(code: str, state: str, session_state: str) -> Optional[Response]: +def login_return(code: str, state: str, session_state: str = None) -> Optional[Response]: """Login_return.""" state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) state_redirect_url = state_dict["redirect_url"] @@ -269,12 +270,13 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response user_model = AuthorizationService.create_user_from_sign_in(user_info) g.user = user_model.id g.token = auth_token_object["id_token"] - AuthenticationService.store_refresh_token( - user_model.id, auth_token_object["refresh_token"] - ) + if "refresh_token" in auth_token_object: + AuthenticationService.store_refresh_token( + user_model.id, auth_token_object["refresh_token"] + ) redirect_url = state_redirect_url tld = current_app.config["THREAD_LOCAL_DATA"] - tld.new_access_token = auth_token_object["access_token"] + tld.new_access_token = auth_token_object["id_token"] tld.new_id_token = auth_token_object["id_token"] return redirect(redirect_url) From fcbf26d7f4803e08c55b75387f0e9c100401663d Mon Sep 17 00:00:00 2001 From: Dan Date: Mon, 30 Jan 2023 16:50:43 -0500 Subject: [PATCH 05/59] Fix typing issue. --- spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 50c6f88a2..6873198a3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -255,7 +255,7 @@ def parse_id_token(token: str) -> Any: return json.loads(decoded) -def login_return(code: str, state: str, session_state: str = None) -> Optional[Response]: +def login_return(code: str, state: str, session_state: str = "") -> Optional[Response]: """Login_return.""" state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) state_redirect_url = state_dict["redirect_url"] From 28e9bcb429fd041ee5e01e193267947ed4591481 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 22:35:24 -0500 Subject: [PATCH 06/59] add deps for serve --- spiffworkflow-frontend/.dockerignore | 1 + spiffworkflow-frontend/Dockerfile | 7 ++++ spiffworkflow-frontend/package.justserve.json | 36 +++++++++++++++++++ 3 files changed, 44 insertions(+) create mode 100644 spiffworkflow-frontend/.dockerignore create mode 100644 spiffworkflow-frontend/package.justserve.json diff --git a/spiffworkflow-frontend/.dockerignore b/spiffworkflow-frontend/.dockerignore new file mode 100644 index 000000000..07e6e472c --- /dev/null +++ b/spiffworkflow-frontend/.dockerignore @@ -0,0 +1 @@ +/node_modules diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile index f06ade93a..c39c20dc1 100644 --- a/spiffworkflow-frontend/Dockerfile +++ b/spiffworkflow-frontend/Dockerfile @@ -12,6 +12,12 @@ FROM base AS setup COPY . 
/app/ +RUN cp /app/package.json /app/package.json.bak +ADD package.justserve.json /app/package.json +RUN npm ci --ignore-scripts +RUN cp -r /app/node_modules /app/node_modules.justserve +RUN cp /app/package.json.bak /app/package.json + # npm ci because it respects the lock file. # --ignore-scripts because authors can do bad things in postinstall scripts. # https://cheatsheetseries.owasp.org/cheatsheets/NPM_Security_Cheat_Sheet.html @@ -31,5 +37,6 @@ ENV PORT0=7001 COPY --from=setup /app/build /app/build COPY --from=setup /app/bin /app/bin +COPY --from=setup /app/node_modules.justserve /app/node_modules ENTRYPOINT ["/app/bin/boot_server_in_docker"] diff --git a/spiffworkflow-frontend/package.justserve.json b/spiffworkflow-frontend/package.justserve.json new file mode 100644 index 000000000..d78004a37 --- /dev/null +++ b/spiffworkflow-frontend/package.justserve.json @@ -0,0 +1,36 @@ +{ + "name": "spiffworkflow-frontend", + "version": "0.1.0", + "private": true, + "dependencies": { + "serve": "^14.0.0" + }, + "scripts": { + "start": "ESLINT_NO_DEV_ERRORS=true PORT=7001 craco start", + "build": "craco build", + "test": "react-scripts test --coverage", + "t": "npm test -- --watchAll=false", + "eject": "craco eject", + "format": "prettier --write src/**/*.[tj]s{,x}", + "lint": "./node_modules/.bin/eslint src", + "lint:fix": "./node_modules/.bin/eslint --fix src" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} From a5adb8556d472ce35e914215a447ed4c6fd940f2 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 09:27:02 -0500 Subject: [PATCH 07/59] less annoying file name for autocomplete --- spiffworkflow-frontend/Dockerfile | 2 +- .../{package.justserve.json => justservewebserver.package.json} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename spiffworkflow-frontend/{package.justserve.json => justservewebserver.package.json} (100%) diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile index c39c20dc1..c777a2683 100644 --- a/spiffworkflow-frontend/Dockerfile +++ b/spiffworkflow-frontend/Dockerfile @@ -13,7 +13,7 @@ FROM base AS setup COPY . /app/ RUN cp /app/package.json /app/package.json.bak -ADD package.justserve.json /app/package.json +ADD justservewebserver.package.json /app/package.json RUN npm ci --ignore-scripts RUN cp -r /app/node_modules /app/node_modules.justserve RUN cp /app/package.json.bak /app/package.json diff --git a/spiffworkflow-frontend/package.justserve.json b/spiffworkflow-frontend/justservewebserver.package.json similarity index 100% rename from spiffworkflow-frontend/package.justserve.json rename to spiffworkflow-frontend/justservewebserver.package.json From 8a53c5103b233be976ab5f2e8b5ede248fe47f10 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 16:14:22 -0500 Subject: [PATCH 08/59] shuffle around Dockerfile to allow to work for background container --- spiffworkflow-backend/Dockerfile | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/spiffworkflow-backend/Dockerfile b/spiffworkflow-backend/Dockerfile index f05f1973b..f4a8f8ec1 100644 --- a/spiffworkflow-backend/Dockerfile +++ b/spiffworkflow-backend/Dockerfile @@ -7,6 +7,14 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH" WORKDIR /app +# base plus packages needed for deployment. 
Could just install these in final, but then we can't cache as much. +FROM base AS deployment + +RUN apt-get update \ + && apt-get clean -y \ + && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \ + && rm -rf /var/lib/apt/lists/* + # Setup image for installing Python dependencies. FROM base AS setup @@ -20,16 +28,11 @@ COPY . /app RUN poetry install --without dev # Final image without setup dependencies. -FROM base AS final +FROM deployment AS final LABEL source="https://github.com/sartography/spiff-arena" LABEL description="Software development platform for building, running, and monitoring executable diagrams" -RUN apt-get update \ - && apt-get clean -y \ - && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \ - && rm -rf /var/lib/apt/lists/* - COPY --from=setup /app /app -ENTRYPOINT ["./bin/boot_server_in_docker"] +CMD ["./bin/boot_server_in_docker"] From 1d68580ae605ded8782a726df217f6440ea350cf Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 16:32:04 -0500 Subject: [PATCH 09/59] remove duplicate label on radio buttons --- .../src/themes/carbon/RadioWidget/RadioWidget.tsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx b/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx index 86dad81ea..2f65bca97 100644 --- a/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx @@ -32,9 +32,6 @@ const RadioWidget = ({ return ( <> - - {label || schema.title} - Date: Tue, 31 Jan 2023 17:11:11 -0500 Subject: [PATCH 10/59] folks who can start instances can also view their logs --- .../services/authorization_service.py | 8 +++++++- .../scripts/test_get_all_permissions.py | 5 +++++ .../unit/test_authorization_service.py | 8 ++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 793a3f9bc..19f9f418b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -551,7 +551,9 @@ class AuthorizationService: permissions_to_assign: list[PermissionToAssign] = [] - # we were thinking that if you can start an instance, you ought to be able to view your own instances. + # we were thinking that if you can start an instance, you ought to be able to: + # 1. view your own instances. + # 2. view the logs for these instances. 
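        # NOTE (editor's illustrative aside, not part of this patch): for
        # permission_set == "start" on a process group "hey", the branch below
        # now yields, per the test expectations updated later in this patch:
        #   PermissionToAssign(permission="create", target_uri="/process-instances/hey:group:*")
        #   PermissionToAssign(permission="read", target_uri="/process-instances/for-me/hey:group:*")
        #   PermissionToAssign(permission="read", target_uri="/logs/hey:group:*")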
if permission_set == "start": target_uri = f"/process-instances/{process_related_path_segment}" permissions_to_assign.append( @@ -561,6 +563,10 @@ class AuthorizationService: permissions_to_assign.append( PermissionToAssign(permission="read", target_uri=target_uri) ) + target_uri = f"/logs/{process_related_path_segment}" + permissions_to_assign.append( + PermissionToAssign(permission="read", target_uri=target_uri) + ) else: if permission_set == "all": diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py index b31c72285..95d15fbf8 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py @@ -41,6 +41,11 @@ class TestGetAllPermissions(BaseTest): ) expected_permissions = [ + { + "group_identifier": "my_test_group", + "uri": "/logs/hey:group:*", + "permissions": ["read"], + }, { "group_identifier": "my_test_group", "uri": "/process-instances/hey:group:*", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py index 2736693e2..d414616c5 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -197,6 +197,10 @@ class TestAuthorizationService(BaseTest): ) -> None: """Test_explode_permissions_start_on_process_group.""" expected_permissions = [ + ( + "/logs/some-process-group:some-process-model:*", + "read", + ), ( "/process-instances/for-me/some-process-group:some-process-model:*", "read", @@ -255,6 +259,10 @@ class TestAuthorizationService(BaseTest): ) -> None: """Test_explode_permissions_start_on_process_model.""" expected_permissions = [ + ( + "/logs/some-process-group:some-process-model/*", + "read", + ), ( "/process-instances/for-me/some-process-group:some-process-model/*", "read", From cd2ff49ea6567d5b93a35cda7e8b32f7a3554496 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 22:30:15 -0500 Subject: [PATCH 11/59] more sentry performance tracing --- .../src/spiffworkflow_backend/__init__.py | 28 +++++++++++++++++ .../spiffworkflow_backend/config/__init__.py | 2 -- .../routes/tasks_controller.py | 30 +++++++++++++++++-- 3 files changed, 56 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 46f82581a..341cfac8d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -157,6 +157,29 @@ def get_hacked_up_app_for_script() -> flask.app.Flask: return app +def traces_sampler(sampling_context: Any) -> Any: + # always inherit + if sampling_context["parent_sampled"] is not None: + return sampling_context["parent_sampled"] + + if "wsgi_environ" in sampling_context: + wsgi_environ = sampling_context["wsgi_environ"] + path_info = wsgi_environ.get("PATH_INFO") + request_method = wsgi_environ.get("REQUEST_METHOD") + + # tasks_controller.task_submit + # this is the current pain point as of 31 jan 2023. 
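        # NOTE (editor's illustrative aside, not part of this patch): Sentry
        # calls traces_sampler once per transaction with a sampling_context
        # shaped roughly like:
        #   {"parent_sampled": None,
        #    "wsgi_environ": {"PATH_INFO": "/v1.0/tasks/42/<task_guid>",
        #                     "REQUEST_METHOD": "PUT", ...}}
        # Returning 1 keeps every matching task-submit transaction; everything
        # else falls through to the 0.01 (1%) default at the bottom.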
+ if ( + path_info + and path_info.startswith("/v1.0/tasks/") + and request_method == "PUT" + ): + return 1 + + # Default sample rate for all others (replaces traces_sample_rate) + return 0.01 + + def configure_sentry(app: flask.app.Flask) -> None: """Configure_sentry.""" import sentry_sdk @@ -193,5 +216,10 @@ def configure_sentry(app: flask.app.Flask) -> None: # of transactions for performance monitoring. # We recommend adjusting this value to less than 1(00%) in production. traces_sample_rate=float(sentry_traces_sample_rate), + traces_sampler=traces_sampler, + # The profiles_sample_rate setting is relative to the traces_sample_rate setting. + _experiments={ + "profiles_sample_rate": 1, + }, before_send=before_send, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py index d7afbeb92..64c7e2c1a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py @@ -94,8 +94,6 @@ def setup_config(app: Flask) -> None: else: print("base_permissions: no permissions file loaded") - - # unversioned (see .gitignore) config that can override everything and include secrets. # src/spiffworkflow_backend/config/secrets.py app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 8ee9f53d0..fcc0dba0f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -10,6 +10,7 @@ from typing import Union import flask.wrappers import jinja2 +import sentry_sdk from flask import current_app from flask import g from flask import jsonify @@ -326,13 +327,12 @@ def process_data_show( ) -def task_submit( +def task_submit_shared( process_instance_id: int, task_id: str, body: Dict[str, Any], terminate_loop: bool = False, ) -> flask.wrappers.Response: - """Task_submit_user_data.""" principal = _find_principal_or_raise() process_instance = _find_process_instance_by_id_or_raise(process_instance_id) if not process_instance.can_submit_task(): @@ -417,6 +417,32 @@ def task_submit( return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") +def task_submit( + process_instance_id: int, + task_id: str, + body: Dict[str, Any], + terminate_loop: bool = False, +) -> flask.wrappers.Response: + """Task_submit_user_data.""" + sentry_op = "controller_action" + sentry_transaction_name = "tasks_controller.task_submit" + transaction = sentry_sdk.Hub.current.scope.transaction + if transaction is None: + current_app.logger.debug( + "transaction was None. pretty sure this never happens." 
+ ) + with sentry_sdk.start_transaction(op=sentry_op, name=sentry_transaction_name): + return task_submit_shared( + process_instance_id, task_id, body, terminate_loop + ) + else: + current_app.logger.debug("transaction existed.") + with transaction.start_child(op=sentry_op, description=sentry_transaction_name): + return task_submit_shared( + process_instance_id, task_id, body, terminate_loop + ) + + def _get_tasks( processes_started_by_user: bool = True, has_lane_assignment_id: bool = True, From cd435841d792d9445a9de6ebb26e4f45bf871390 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 07:45:48 -0500 Subject: [PATCH 12/59] more spans to track performance --- .../routes/tasks_controller.py | 25 +++++++++---------- .../services/process_instance_service.py | 6 +++-- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index fcc0dba0f..feb9218e0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -380,15 +380,16 @@ def task_submit_shared( ) ) - processor.lock_process_instance("Web") - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - human_task=human_task, - ) - processor.unlock_process_instance("Web") + with sentry_sdk.start_span(op="task", description="complete_form_task"): + processor.lock_process_instance("Web") + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) + processor.unlock_process_instance("Web") # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same # task spec, complete that form as well. @@ -428,15 +429,13 @@ def task_submit( sentry_transaction_name = "tasks_controller.task_submit" transaction = sentry_sdk.Hub.current.scope.transaction if transaction is None: - current_app.logger.debug( - "transaction was None. pretty sure this never happens." - ) + current_app.logger.info("transaction was None. pretty sure this never happens.") with sentry_sdk.start_transaction(op=sentry_op, name=sentry_transaction_name): return task_submit_shared( process_instance_id, task_id, body, terminate_loop ) else: - current_app.logger.debug("transaction existed.") + current_app.logger.info("transaction existed.") with transaction.start_child(op=sentry_op, description=sentry_transaction_name): return task_submit_shared( process_instance_id, task_id, body, terminate_loop diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 9b07ce1f4..63c53a214 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -4,6 +4,7 @@ from typing import Any from typing import List from typing import Optional +import sentry_sdk from flask import current_app from SpiffWorkflow.task import Task as SpiffTask # type: ignore @@ -234,8 +235,9 @@ class ProcessInstanceService: # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. 
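        # NOTE (editor's illustrative aside, not part of this patch):
        # sentry_sdk.start_span opens a child span under the transaction the
        # Flask integration already started for this request, so engine time
        # shows up as its own segment, e.g.:
        #   with sentry_sdk.start_span(op="task", description="backend_do_engine_steps"):
        #       processor.do_engine_steps(save=True)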
processor.complete_task(spiff_task, human_task, user=user) - # maybe move this out once we have the interstitial page since this is here just so we can get the next human task - processor.do_engine_steps(save=True) + with sentry_sdk.start_span(op="task", description="backend_do_engine_steps"): + # maybe move this out once we have the interstitial page since this is here just so we can get the next human task + processor.do_engine_steps(save=True) @staticmethod def extract_form_data(latest_data: dict, task: SpiffTask) -> dict: From 7da3cb0377b1ac7c46fee95796410f39ee20defe Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 07:53:35 -0500 Subject: [PATCH 13/59] avoid poetry installing deps when we have them cached if they do not change --- spiffworkflow-backend/Dockerfile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/spiffworkflow-backend/Dockerfile b/spiffworkflow-backend/Dockerfile index f4a8f8ec1..d7a4b0345 100644 --- a/spiffworkflow-backend/Dockerfile +++ b/spiffworkflow-backend/Dockerfile @@ -24,6 +24,11 @@ RUN useradd _gunicorn --no-create-home --user-group RUN apt-get update \ && apt-get install -y -q gcc libssl-dev libpq-dev +# poetry install takes a long time and can be cached if dependencies don't change, +# so that's why we tolerate running it twice. +COPY pyproject.toml poetry.lock /app/ +RUN poetry install --without dev + COPY . /app RUN poetry install --without dev From 847a2cebf2d7ad4aced5ee33bf778a91b05d1544 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 13:30:45 -0500 Subject: [PATCH 14/59] get some more insight into connector proxy timings --- .../services/service_task_service.py | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index 674ad54d6..c5401104f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -3,6 +3,7 @@ import json from typing import Any import requests +import sentry_sdk from flask import current_app from flask import g @@ -45,27 +46,27 @@ class ServiceTaskDelegate: @staticmethod def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str: """Calls a connector via the configured proxy.""" - params = { - k: ServiceTaskDelegate.check_prefixes(v["value"]) - for k, v in bpmn_params.items() - } - params["spiff__task_data"] = task_data + call_url = f"{connector_proxy_url()}/v1/do/{name}" + with sentry_sdk.start_transaction(op="call-connector", name=call_url): + params = { + k: ServiceTaskDelegate.check_prefixes(v["value"]) + for k, v in bpmn_params.items() + } + params["spiff__task_data"] = task_data - proxied_response = requests.post( - f"{connector_proxy_url()}/v1/do/{name}", json=params - ) + proxied_response = requests.post(call_url, json=params) - parsed_response = json.loads(proxied_response.text) + parsed_response = json.loads(proxied_response.text) - if "refreshed_token_set" not in parsed_response: - return proxied_response.text + if "refreshed_token_set" not in parsed_response: + return proxied_response.text - secret_key = parsed_response["auth"] - refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"]) - user_id = g.user.id if UserService.has_user() else None - SecretService().update_secret(secret_key, refreshed_token_set, user_id) + secret_key = 
parsed_response["auth"] + refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"]) + user_id = g.user.id if UserService.has_user() else None + SecretService().update_secret(secret_key, refreshed_token_set, user_id) - return json.dumps(parsed_response["api_response"]) + return json.dumps(parsed_response["api_response"]) class ServiceTaskService: From 808d8d4aab7f396a58381a2d89774ba0c4336963 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 13:44:12 -0500 Subject: [PATCH 15/59] there is no need to ever sentry_sdk.start_transaction because the flask integration does that --- .../routes/tasks_controller.py | 19 ++++--------------- .../services/service_task_service.py | 2 +- 2 files changed, 5 insertions(+), 16 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index feb9218e0..2879c1207 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -425,21 +425,10 @@ def task_submit( terminate_loop: bool = False, ) -> flask.wrappers.Response: """Task_submit_user_data.""" - sentry_op = "controller_action" - sentry_transaction_name = "tasks_controller.task_submit" - transaction = sentry_sdk.Hub.current.scope.transaction - if transaction is None: - current_app.logger.info("transaction was None. pretty sure this never happens.") - with sentry_sdk.start_transaction(op=sentry_op, name=sentry_transaction_name): - return task_submit_shared( - process_instance_id, task_id, body, terminate_loop - ) - else: - current_app.logger.info("transaction existed.") - with transaction.start_child(op=sentry_op, description=sentry_transaction_name): - return task_submit_shared( - process_instance_id, task_id, body, terminate_loop - ) + with sentry_sdk.start_span( + op="controller_action", description="tasks_controller.task_submit" + ): + return task_submit_shared(process_instance_id, task_id, body, terminate_loop) def _get_tasks( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index c5401104f..37af3956c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -47,7 +47,7 @@ class ServiceTaskDelegate: def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str: """Calls a connector via the configured proxy.""" call_url = f"{connector_proxy_url()}/v1/do/{name}" - with sentry_sdk.start_transaction(op="call-connector", name=call_url): + with sentry_sdk.start_span(op="call-connector", description=call_url): params = { k: ServiceTaskDelegate.check_prefixes(v["value"]) for k, v in bpmn_params.items() From 974a2e3560acbfa8577766f0315c69040c97bf22 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 17:06:34 -0500 Subject: [PATCH 16/59] bulk insert logs for performance improvement --- .../src/spiffworkflow_backend/services/logging_service.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 6a60944e6..9981e1eb8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -240,5 +240,8 @@ class DBHandler(logging.Handler): "spiff_step": spiff_step, } ) - if len(self.logs) % 1 == 0: + # so at some point we are going to insert logs. + # we don't want to insert on every log, so we will insert every 100 logs, which is just about as fast as inserting + # on every 1,000 logs. if we get deadlocks in the database, this can be changed to 1 in order to insert on every log. + if len(self.logs) % 100 == 0: self.bulk_insert_logs() From 53cf26e8ef7f2aee9998469106d08231a0d5edba Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 09:54:19 -0500 Subject: [PATCH 17/59] add keycloak users --- .../realm_exports/spiffworkflow-realm.json | 208 ++++++++++++++++-- .../keycloak/test_user_lists/status | 8 + 2 files changed, 192 insertions(+), 24 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index a32acf00a..634caef71 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -854,6 +854,46 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "672167fd-ae79-47a7-8429-f3bb1bd4ee55", + "createdTimestamp" : 1675349217829, + "username" : "infra1.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "infra1.sme@status.im", + "credentials" : [ { + "id" : "bd5843bf-98cc-4891-ab03-693a5d69078b", + "type" : "password", + "createdDate" : 1675349217863, + "secretData" : "{\"value\":\"A78sm/+e2x/N/3A7Pk05eKhfANp+ZO9BQA3LYMwpzQ5KK2D/Ot8d1plOnqMT61rTnnCgxP8dtlA6/Ws61CMTYg==\",\"salt\":\"XOOknamJPwXD1LDj6LEodA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "40891b68-121f-4fdb-86c0-0f52836d7e65", + "createdTimestamp" : 1675349217890, + "username" : "infra2.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "infra2.sme@status.im", + "credentials" : [ { + "id" : "7e9927e2-ef7f-4247-b663-1f59147a9066", + "type" : "password", + "createdDate" : 1675349217926, + "secretData" : "{\"value\":\"j4M9u8p9FDCitGpb7JXM9JWFVGvBu7R2TOYG79c+Witl7gfWppues9fFzhlFyXgC78v6diHoQ4LwCwJGJS3loQ==\",\"salt\":\"H+i8qv6ulrBEZla/v8gDDw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "1561518b-c327-491e-9db3-23c2b5394104", "createdTimestamp" : 1669303773974, @@ -1043,6 +1083,46 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "eff82d12-9a67-4002-b3c5-37811bd45199", + "createdTimestamp" : 1675349217585, + "username" : "legal.program-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.program-lead.sme@status.im", + "credentials" : [ { + "id" : "933e3fc4-398a-46c3-bc4d-783ab29a0a5b", + "type" : "password", + "createdDate" : 1675349217655, + "secretData" : 
"{\"value\":\"x2M9khnGK+VCykoWbZKEcHNv5QMAcumqLa7+o+STJV8UYt7BobSBn7w1r3cbyYlvkgoWIglG8S2nLDFFb6hAQg==\",\"salt\":\"/lQYRrsUY1BxNUOZSKaZwA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "8cd6feba-5ca6-4cfb-bc1a-a52c80595783", + "createdTimestamp" : 1675349217698, + "username" : "legal.project-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.project-lead.sme@status.im", + "credentials" : [ { + "id" : "908f858c-d3cd-47a9-b611-a1d48f0247e5", + "type" : "password", + "createdDate" : 1675349217733, + "secretData" : "{\"value\":\"r53SXu0dp6FrSJAVLHYrfwSKPZY9OKHfHBuJDEE2DCbZiQRH77C4sZWfUwbu/6OOhTtiBEe7gz2DQpimIDY4RQ==\",\"salt\":\"+g/OXXJEMkQiahmjSylAkw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "2a3176a0-8dd5-4223-a3e1-3cac4134e474", "createdTimestamp" : 1674148695030, @@ -1063,6 +1143,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "3d62ca4e-88bc-4302-89c1-8741c771147e", + "createdTimestamp" : 1675349217762, + "username" : "legal1.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal1.sme@status.im", + "credentials" : [ { + "id" : "b774d46d-a3e8-417f-97c6-2d2102a54b0b", + "type" : "password", + "createdDate" : 1675349217799, + "secretData" : "{\"value\":\"PF21YsnIoYZLJFT/y1i2FV4OmaQj8dRsalZ9R2PK6t/jKze3ds4k+I7WVe4h2H0hMB9fo9cSQ7kt2ygxfEBheg==\",\"salt\":\"5sOkSXzRSgNz7lHfUbKzdQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "6f5bfa09-7494-4a2f-b871-cf327048cac7", "createdTimestamp" : 1665517010600, @@ -1225,6 +1325,46 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "ace0432f-1818-4210-8bcf-15533abfb3ce", + "createdTimestamp" : 1675349217958, + "username" : "security.program-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security.program-lead.sme@status.im", + "credentials" : [ { + "id" : "602512dd-b24f-458c-9cef-7271bd8177bc", + "type" : "password", + "createdDate" : 1675349217993, + "secretData" : "{\"value\":\"vUb+t9ukHz3oHGUxaYUP34riZrshZU4c3iWpHB0OzI3y0ggCeT9xFEcmrwdkfilkKvCBJxLswlirWmgnmxZH0w==\",\"salt\":\"0hzZkDK4hPH5xgR1TpyG1Q==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "6272ac80-1d79-4e3c-a5c1-b31660560318", + "createdTimestamp" : 1675349218020, + "username" : "security.project-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : 
"security.project-lead.sme@status.im", + "credentials" : [ { + "id" : "eb7673bf-50f1-40af-927b-162f536f6187", + "type" : "password", + "createdDate" : 1675349218054, + "secretData" : "{\"value\":\"E1eLmC7hCcv7I5X30TfMvpZv3MtHH+rVhgLrZnBJSUvsrXmRkHWScJ/POHQLwUgCLJeU/lKDP/f0TdO2PvHiow==\",\"salt\":\"dWM5XJIR7m/eZ0YlHmuC3A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "74374cda-1516-48e5-9ef2-1fd7bcee84d3", "createdTimestamp" : 1674148695088, @@ -1245,6 +1385,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "98faab0c-d2af-4794-8491-03dad5f30c63", + "createdTimestamp" : 1675349218087, + "username" : "security1.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security1.sme@status.im", + "credentials" : [ { + "id" : "37bd6b9b-015b-4790-8a4f-883c47035bc4", + "type" : "password", + "createdDate" : 1675349218122, + "secretData" : "{\"value\":\"BJP9K4qIdnaDnE3meM2GLWMFdSJryxcZovtKDlZNaQXfSUH3X1mOJfaLXQsuTWJzSMIow8XZ5+ye47ZNabLCaQ==\",\"salt\":\"BqD7jPpdB7PzU6QTN5dpMA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "487d3a85-89dd-4839-957a-c3f6d70551f6", "createdTimestamp" : 1657115173081, @@ -2514,7 +2674,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2532,7 +2692,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "oidc-usermodel-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2622,7 +2782,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "a91920d9-792e-486f-9a02-49fe00857ce5", + "id" : "feafc299-fede-4880-9e23-eb81aca22808", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2644,7 +2804,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"6b8f504c-39fb-4608-9223-52deb5ae0dfe", + "id" : "ce7904d0-9182-49a2-aa71-a7b43e21f3ac", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2673,7 +2833,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ac4dd6f3-43b2-4212-90eb-4df7c9a6a0bc", + "id" : "d9c6909a-5cc1-4ddf-b297-dbfcf6e609a6", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2695,7 +2855,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "726b4a58-cb78-4105-a34c-3e4404c74362", + "id" : "083a589e-a486-42b6-ae73-1ec983967ff5", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2717,7 +2877,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "be1b5f5d-b80f-46a6-804b-bce20e2de246", + "id" : "7f0248b0-2d51-4175-9fd2-52b606a39e26", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2739,7 +2899,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ff5097d8-818a-4176-8512-caf9d81eb6db", + "id" : "44465f1f-c700-4ec0-a234-d95c994c9e25", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2761,7 +2921,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b9ecf989-e87b-45c0-a440-bce46b473dec", + "id" : "8cf09055-5b98-4fc8-b867-3dffacdec21b", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2783,7 +2943,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4554310c-e125-4834-a84e-53bbec7a79d6", + "id" : "16b50b3e-4240-4f49-a85e-1bfd40def300", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2806,7 +2966,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "204549aa-c931-45a2-b2f0-1a5a0c724935", + "id" : "2aa981ae-d67e-49fb-95a4-91de1e5ab724", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2828,7 +2988,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d02f58b1-6469-46ea-a348-d923b5aa9727", + "id" : "cf8406f7-09c3-4614-a898-99c9d66746f6", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -2864,7 +3024,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7ef6a658-be09-4b81-91ac-f21dc80b0841", + "id" : "e1ec7d6e-7612-4c5b-afce-c7f4fddbf6ec", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -2900,7 +3060,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f7f2eeab-6455-4a18-a98d-b1a5f04e35fb", + "id" : "f5862b09-6e01-4c88-b44e-26dc59d71b80", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -2929,7 +3089,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "c44389c2-08b2-4adb-a6e9-e41006cb20c7", + "id" : "7caa8611-8b13-437e-83b2-556899b5444f", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -2944,7 +3104,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"edf00de8-8f19-4a32-98c4-15e719c1fadd", + "id" : "91d40deb-344f-4e0b-a845-98b2fc4a633a", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -2967,7 +3127,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "58415605-eb47-41b3-a07f-90bbbbcb9963", + "id" : "f221b5e6-1bcc-4b37-ba61-4d3bc6a30a8b", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -2989,7 +3149,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1eae6099-3e1e-484b-ad94-b09339affb68", + "id" : "3ed8e597-19af-4ec8-b532-a97311f52de3", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -3011,7 +3171,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8af03739-b77a-4582-ab63-a1855ca4f637", + "id" : "3970fd16-3786-4eb3-9efe-453d0984b18b", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -3027,7 +3187,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0c308998-c5ad-4cf8-ab5c-15be89cbe4d7", + "id" : "e26b27b4-c957-491c-bb6d-9d226b22399c", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -3063,7 +3223,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "5510aa65-e78d-4d08-a3ca-31e277bc3cd0", + "id" : "3ae37429-a623-42e3-a4a1-f9586b96b730", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -3099,7 +3259,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b6b3e35d-8df3-487e-b2d2-9fdf524a4181", + "id" : "7606ecd5-eb13-4aee-bd9f-3ec4ce77c59c", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -3115,13 +3275,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "a2e9294b-74ce-4ea6-8372-9d9fb3d60a06", + "id" : "058b3c89-4ea4-43fa-b337-e523b1d93ec3", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "de65a90c-cc4b-4bf0-8e84-756e23a504f0", + "id" : "21410ac7-4b82-4f19-aae2-43ac33ba3f8f", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 651e76daf..667c4f033 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -15,3 +15,11 @@ dao.project.lead@status.im desktop.project.lead@status.im app.program.lead@status.im desktop.program.lead@status.im +legal.program-lead.sme@status.im +legal.project-lead.sme@status.im +legal1.sme@status.im +infra1.sme@status.im +infra2.sme@status.im +security.program-lead.sme@status.im +security.project-lead.sme@status.im +security1.sme@status.im From f4ff86f9b0216b88dbb7ca9b89b44f9a00c1bce2 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Thu, 2 Feb 2023 10:24:55 -0500 Subject: [PATCH 18/59] Allow for different Python Environments when executing scripts within SpiffWorkflow (#121) --- spiffworkflow-backend/poetry.lock | 5 +- .../services/process_instance_processor.py | 184 ++++++++++++++++-- .../services/script_unit_test_runner.py | 2 + .../scripts/test_get_localtime.py | 3 +- 4 files changed, 168 
insertions(+), 26 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 312890e5d..733c84ac1 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1825,7 +1825,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331" +resolved_reference = "64737498caa36c25b12f5216bdc9c30338b2a1fa" [[package]] name = "SQLAlchemy" @@ -2863,10 +2863,7 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, - {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 7cec48a19..40458838e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -26,8 +26,10 @@ from lxml import etree # type: ignore from lxml.etree import XMLSyntaxError # type: ignore from RestrictedPython import safe_globals # type: ignore from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore -from SpiffWorkflow.bpmn.PythonScriptEngine import Box # type: ignore -from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine # type: ignore +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment # type: ignore +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore @@ -150,6 +152,132 @@ class ProcessInstanceLockedBySomethingElseError(Exception): 
pass +class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore + def __init__(self, environment_globals: Dict[str, Any]): + """BoxedTaskDataBasedScriptEngineEnvironment.""" + self._last_result: Dict[str, Any] = {} + super().__init__(environment_globals) + + def execute( + self, + script: str, + context: Dict[str, Any], + external_methods: Optional[Dict[str, Any]] = None, + ) -> None: + super().execute(script, context, external_methods) + self._last_result = context + + def last_result(self) -> Dict[str, Any]: + return self._last_result + + def clear_state(self) -> None: + pass + + def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None: + pass + + def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None: + pass + + def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None: + pass + + def revise_state_with_task_data(self, task: SpiffTask) -> None: + pass + + +class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment): # type: ignore + PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state" + + def __init__(self, environment_globals: Dict[str, Any]): + """NonTaskDataBasedScriptEngineEnvironment.""" + self.state: Dict[str, Any] = {} + self.non_user_defined_keys = set( + [*environment_globals.keys()] + ["__builtins__", "current_user"] + ) + super().__init__(environment_globals) + + def evaluate( + self, + expression: str, + context: Dict[str, Any], + external_methods: Optional[dict[str, Any]] = None, + ) -> Any: + # TODO: once integrated look at the tests that fail without Box + Box.convert_to_box(context) + state = {} + state.update(self.globals) + state.update(external_methods or {}) + state.update(self.state) + state.update(context) + return eval(expression, state) # noqa + + def execute( + self, + script: str, + context: Dict[str, Any], + external_methods: Optional[Dict[str, Any]] = None, + ) -> None: + # TODO: once integrated look at the tests that fail without Box + Box.convert_to_box(context) + self.state.update(self.globals) + self.state.update(external_methods or {}) + self.state.update(context) + exec(script, self.state) # noqa + + self.state = self._user_defined_state(external_methods) + + # the task data needs to be updated with the current state so data references can be resolved properly. + # the state will be removed later once the task is completed. 
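+        # illustrative example (assuming this environment is the active one): running
+        # the script "x = 1" through execute() leaves "x" in self.state via
+        # _user_defined_state(), and the update below mirrors it into the task data;
+        # revise_state_with_task_data() later reconciles the two copies.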
+ context.update(self.state) + + def _user_defined_state( + self, external_methods: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + keys_to_filter = self.non_user_defined_keys + if external_methods is not None: + keys_to_filter |= set(external_methods.keys()) + + return { + k: v + for k, v in self.state.items() + if k not in keys_to_filter and not callable(v) + } + + def last_result(self) -> Dict[str, Any]: + return self.state + + def clear_state(self) -> None: + self.state = {} + + def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None: + key = self.PYTHON_ENVIRONMENT_STATE_KEY + state = self._user_defined_state() + bpmn_process_instance.data[key] = state + + def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None: + key = self.PYTHON_ENVIRONMENT_STATE_KEY + self.state = bpmn_process_instance.data.get(key, {}) + + def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None: + bpmn_process_instance.data.update(self._user_defined_state()) + + def revise_state_with_task_data(self, task: SpiffTask) -> None: + state_keys = set(self.state.keys()) + task_data_keys = set(task.data.keys()) + state_keys_to_remove = state_keys - task_data_keys + task_data_keys_to_keep = task_data_keys - state_keys + + self.state = { + k: v for k, v in self.state.items() if k not in state_keys_to_remove + } + task.data = {k: v for k, v in task.data.items() if k in task_data_keys_to_keep} + + +class CustomScriptEngineEnvironment(BoxedTaskDataBasedScriptEngineEnvironment): + pass + + class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore """This is a custom script processor that can be easily injected into Spiff Workflow. @@ -179,7 +307,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore default_globals.update(safe_globals) default_globals["__builtins__"]["__import__"] = _import - super().__init__(default_globals=default_globals) + environment = CustomScriptEngineEnvironment(default_globals) + + super().__init__(environment=environment) def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]: """__get_augment_methods.""" @@ -392,7 +522,7 @@ class ProcessInstanceProcessor: validate_only, subprocesses=subprocesses, ) - self.bpmn_process_instance.script_engine = self._script_engine + self.set_script_engine(self.bpmn_process_instance) self.add_user_info_to_process_instance(self.bpmn_process_instance) except MissingSpecError as ke: @@ -438,6 +568,18 @@ class ProcessInstanceProcessor: bpmn_process_spec, subprocesses ) + @staticmethod + def set_script_engine(bpmn_process_instance: BpmnWorkflow) -> None: + ProcessInstanceProcessor._script_engine.environment.restore_state( + bpmn_process_instance + ) + bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine + + def preserve_script_engine_state(self) -> None: + ProcessInstanceProcessor._script_engine.environment.preserve_state( + self.bpmn_process_instance + ) + def current_user(self) -> Any: """Current_user.""" current_user = None @@ -470,11 +612,12 @@ class ProcessInstanceProcessor: subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, ) -> BpmnWorkflow: """Get_bpmn_process_instance_from_workflow_spec.""" - return BpmnWorkflow( + bpmn_process_instance = BpmnWorkflow( spec, - script_engine=ProcessInstanceProcessor._script_engine, subprocess_specs=subprocesses, ) + ProcessInstanceProcessor.set_script_engine(bpmn_process_instance) + return bpmn_process_instance @staticmethod def __get_bpmn_process_instance( @@ -501,9 +644,7 @@ class ProcessInstanceProcessor: 
finally: spiff_logger.setLevel(original_spiff_logger_log_level) - bpmn_process_instance.script_engine = ( - ProcessInstanceProcessor._script_engine - ) + ProcessInstanceProcessor.set_script_engine(bpmn_process_instance) else: bpmn_process_instance = ( ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( @@ -1384,25 +1525,25 @@ class ProcessInstanceProcessor: def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None: """Do_engine_steps.""" step_details = [] + + def did_complete_task(task: SpiffTask) -> None: + self._script_engine.environment.revise_state_with_task_data(task) + step_details.append(self.spiff_step_details_mapping()) + try: - self.bpmn_process_instance.refresh_waiting_tasks( - # - # commenting out to see if this helps with the growing spiff steps/db issue - # - # will_refresh_task=lambda t: self.increment_spiff_step(), - # did_refresh_task=lambda t: step_details.append( - # self.spiff_step_details_mapping() - # ), - ) + self.bpmn_process_instance.refresh_waiting_tasks() self.bpmn_process_instance.do_engine_steps( exit_at=exit_at, will_complete_task=lambda t: self.increment_spiff_step(), - did_complete_task=lambda t: step_details.append( - self.spiff_step_details_mapping() - ), + did_complete_task=did_complete_task, ) + if self.bpmn_process_instance.is_completed(): + self._script_engine.environment.finalize_result( + self.bpmn_process_instance + ) + self.process_bpmn_messages() self.queue_waiting_receive_messages() @@ -1466,6 +1607,7 @@ class ProcessInstanceProcessor: def serialize(self) -> str: """Serialize.""" self.check_task_data_size() + self.preserve_script_engine_state() return self._serializer.serialize_json(self.bpmn_process_instance) # type: ignore def next_user_tasks(self) -> list[SpiffTask]: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py index 1fafb5487..310f53e98 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py @@ -45,6 +45,7 @@ class ScriptUnitTestRunner: context = input_context.copy() try: + cls._script_engine.environment.clear_state() cls._script_engine._execute(context=context, script=script) except SyntaxError as ex: return ScriptUnitTestResult( @@ -77,6 +78,7 @@ class ScriptUnitTestRunner: error=f"Failed to execute script: {error_message}", ) + context = cls._script_engine.environment.last_result() result_as_boolean = context == expected_output_context script_unit_test_result = ScriptUnitTestResult( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py index 90e4158da..8116ec423 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py @@ -87,7 +87,8 @@ class TestGetLocaltime(BaseTest): ) assert spiff_task - data = spiff_task.data + + data = ProcessInstanceProcessor._script_engine.environment.last_result() some_time = data["some_time"] localtime = data["localtime"] timezone = data["timezone"] From 18a23a729ac131109d9374aec2550965a11eb0b5 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Thu, 2 Feb 2023 14:44:37 -0500 Subject: [PATCH 19/59] File download from workflow data 
(#122) --- .../src/spiffworkflow_backend/api.yml | 39 +++++++++++++ .../routes/process_api_blueprint.py | 57 ++++++++++++++++++- .../src/spiffworkflow_backend/routes/user.py | 5 ++ .../scripts/markdown_file_download_link.py | 51 +++++++++++++++++ 4 files changed, 151 insertions(+), 1 deletion(-) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 825a24b4a..326d55b6e 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1605,6 +1605,45 @@ paths: schema: $ref: "#/components/schemas/Workflow" + /process-data-file-download/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The modified id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: process_data_identifier + in: path + required: true + description: The identifier of the process data. + schema: + type: string + - name: index + in: query + required: false + description: The optional index of the value if the key's value is an array + schema: + type: integer + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_file_download + summary: Download the file referenced in the process data value. + tags: + - Data Objects + responses: + "200": + description: Fetch succeeded. + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + /send-event/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: modified_process_model_identifier diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 0e9bd581d..82263475b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -1,7 +1,9 @@ """APIs for dealing with process groups, process models, and process instances.""" +import base64 import json from typing import Any from typing import Dict +from typing import Optional import flask.wrappers from flask import Blueprint @@ -81,10 +83,12 @@ def process_list() -> Any: return SpecReferenceSchema(many=True).dump(references) -def process_data_show( +def _process_data_fetcher( process_instance_id: int, process_data_identifier: str, modified_process_model_identifier: str, + download_file_data: bool, + index: Optional[int] = None, ) -> flask.wrappers.Response: """Process_data_show.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -94,6 +98,26 @@ if process_data_identifier in all_process_data: process_data_value = all_process_data[process_data_identifier] + if process_data_value is not None and index is not None: + process_data_value = process_data_value[index] + + if ( + download_file_data + and isinstance(process_data_value, str) + and process_data_value.startswith("data:") + ): + parts = process_data_value.split(";") + mimetype = parts[0][4:] + filename = parts[1] + base64_value = parts[2].split(",")[1] + file_contents =
base64.b64decode(base64_value) + + return Response( + file_contents, + mimetype=mimetype, + headers={"Content-disposition": f"attachment; filename={filename}"}, + ) + return make_response( jsonify( { @@ -105,6 +129,37 @@ ) +def process_data_show( + process_instance_id: int, + process_data_identifier: str, + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_data_show.""" + return _process_data_fetcher( + process_instance_id, + process_data_identifier, + modified_process_model_identifier, + False, + None, + ) + + +def process_data_file_download( + process_instance_id: int, + process_data_identifier: str, + modified_process_model_identifier: str, + index: Optional[int] = None, +) -> flask.wrappers.Response: + """Process_data_file_download.""" + return _process_data_fetcher( + process_instance_id, + process_data_identifier, + modified_process_model_identifier, + True, + index, + ) + + # sample body: # {"ref": "refs/heads/main", "repository": {"name": "sample-process-models", # "full_name": "sartography/sample-process-models", "private": False .... }} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 6873198a3..6fd7d39c6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -17,6 +17,7 @@ from flask import request from werkzeug.wrappers import Response from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authentication_service import AuthenticationService from spiffworkflow_backend.services.authentication_service import ( @@ -58,6 +59,10 @@ def verify_token( if not token and "Authorization" in request.headers: token = request.headers["Authorization"].removeprefix("Bearer ") + if not token and "access_token" in request.cookies: + if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/"): + token = request.cookies["access_token"] + # This should never be set here but just in case _clear_auth_tokens_from_thread_local_data() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py new file mode 100644 index 000000000..3952525be --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py @@ -0,0 +1,51 @@ +"""Markdown_file_download_link.""" +from typing import Any +from urllib.parse import unquote + +from flask import current_app + +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.scripts.script import Script + + +class GetMarkdownFileDownloadLink(Script): + """GetMarkdownFileDownloadLink.""" + + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + + def get_description(self) -> str: + """Get_description.""" + return """Returns a string in markdown format.""" + + def run( + self, + script_attributes_context: ScriptAttributesContext, + *_args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + # example input: #
"data:application/pdf;name=Harmeet_1234.pdf;base64,JV...." + process_data_identifier = kwargs["key"] + parts = kwargs["file_data"].split(";") + file_index = kwargs["file_index"] + label = unquote(parts[1].split("=")[1]) + process_model_identifier = script_attributes_context.process_model_identifier + modified_process_model_identifier = ( + ProcessModelInfo.modify_process_identifier_for_path_param( + process_model_identifier + ) + ) + process_instance_id = script_attributes_context.process_instance_id + url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"] + url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + f"{process_instance_id}/{process_data_identifier}?index={file_index}" + link = f"[{label}]({url})" + + return link From f1068ea8c1f7dfd1db480f111f440327a9698da2 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Thu, 2 Feb 2023 15:04:57 -0500 Subject: [PATCH 20/59] Quick fix for url building --- .../scripts/markdown_file_download_link.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py index 3952525be..d1b3af7f9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py @@ -44,8 +44,8 @@ class GetMarkdownFileDownloadLink(Script): ) process_instance_id = script_attributes_context.process_instance_id url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"] - url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" - f"{process_instance_id}/{process_data_identifier}?index={file_index}" + url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + \ + f"{process_instance_id}/{process_data_identifier}?index={file_index}" link = f"[{label}]({url})" return link From 125f8eba0375453d9ca2a8eaf40e4859e9914652 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 15:40:01 -0500 Subject: [PATCH 21/59] simplify spiff integration post serializer update, w/ elizabeth and jon --- spiffworkflow-backend/poetry.lock | 5 +- .../scripts/markdown_file_download_link.py | 6 ++- .../services/process_instance_processor.py | 51 +++---------------- 3 files changed, 15 insertions(+), 47 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 733c84ac1..570faf859 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1825,7 +1825,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "64737498caa36c25b12f5216bdc9c30338b2a1fa" +resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad" [[package]] name = "SQLAlchemy" @@ -2546,6 +2546,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2554,6 +2555,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2562,6 +2564,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py index d1b3af7f9..25f81cc7b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py @@ -44,8 +44,10 @@ class GetMarkdownFileDownloadLink(Script): ) process_instance_id = script_attributes_context.process_instance_id url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"] - url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + \ - f"{process_instance_id}/{process_data_identifier}?index={file_index}" + url += ( + f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + + 
f"{process_instance_id}/{process_data_identifier}?index={file_index}" + ) link = f"[{label}]({url})" return link diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 40458838e..b45add697 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -38,36 +38,14 @@ from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignor from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore -from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore +from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter # type: ignore from SpiffWorkflow.exceptions import WorkflowException # type: ignore from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore -from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter # type: ignore -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - CallActivityTaskConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( +from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore +from SpiffWorkflow.spiff.serializer.task_spec_converters import ( # type: ignore EventBasedGatewayConverter, ) -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - IntermediateCatchEventConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - IntermediateThrowEventConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import ManualTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ReceiveTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ScriptTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import SendTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ServiceTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import StartEventConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import SubWorkflowTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - TransactionSubprocessConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import UserTaskConverter from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore @@ -110,6 +88,8 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDeleg from spiffworkflow_backend.services.spec_file_service import SpecFileService from spiffworkflow_backend.services.user_service import UserService +SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter) + # Sorry about all this crap. I wanted to move this thing to another file, but # importing a bunch of types causes circular imports. 
@@ -408,26 +388,9 @@ class ProcessInstanceProcessor: _script_engine = CustomBpmnScriptEngine() SERIALIZER_VERSION = "1.0-spiffworkflow-backend" + wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter( - [ - BoundaryEventConverter, - BusinessRuleTaskConverter, - CallActivityTaskConverter, - EndEventConverter, - IntermediateCatchEventConverter, - IntermediateThrowEventConverter, - EventBasedGatewayConverter, - ManualTaskConverter, - NoneTaskConverter, - ReceiveTaskConverter, - ScriptTaskConverter, - SendTaskConverter, - ServiceTaskConverter, - StartEventConverter, - SubWorkflowTaskConverter, - TransactionSubprocessConverter, - UserTaskConverter, - ] + SPIFF_SPEC_CONFIG ) _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION) _event_serializer = EventBasedGatewayConverter() From c449d1785284c33099fb7b1a82946aa0ea35255f Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 19:00:58 -0500 Subject: [PATCH 22/59] try to improve exception handling by avoiding raising ApiError from services --- .../exceptions/api_error.py | 31 +++++++++++++--- .../services/authentication_service.py | 24 ++++++++++--- .../services/authorization_service.py | 36 ++++++++----------- .../src/components/ProcessGroupForm.tsx | 2 +- 4 files changed, 63 insertions(+), 30 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index 02a66a207..ab5bf1c3a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -20,6 +20,11 @@ from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.specs.base import TaskSpec # type: ignore from SpiffWorkflow.task import Task # type: ignore +from spiffworkflow_backend.services.authentication_service import NotAuthorizedError +from spiffworkflow_backend.services.authentication_service import TokenInvalidError +from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError +from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError + api_error_blueprint = Blueprint("api_error_blueprint", __name__) @@ -172,7 +177,12 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: set_user_sentry_context() sentry_link = None - if not isinstance(exception, ApiError) or exception.error_code != "invalid_token": + # we want to capture_exception to log the exception to sentry, but we don't want to log: + # 1. ApiErrors that are just invalid tokens + # 2. 
NotAuthorizedError + if ( + not isinstance(exception, ApiError) or exception.error_code != "invalid_token" + ) and not isinstance(exception, NotAuthorizedError): id = capture_exception(exception) if isinstance(exception, ApiError): @@ -193,17 +203,30 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: # an event id or send out tags like username current_app.logger.exception(exception) + error_code = "internal_server_error" + status_code = 500 + if ( + isinstance(exception, NotAuthorizedError) + or isinstance(exception, TokenNotProvidedError) + or isinstance(exception, TokenInvalidError) + ): + error_code = "not_authorized" + status_code = 403 + if isinstance(exception, UserNotLoggedInError): + error_code = "not_authenticated" + status_code = 401 + # set api_exception like this to avoid confusing mypy - # and what type the object is + # about what type the object is api_exception = None if isinstance(exception, ApiError): api_exception = exception else: api_exception = ApiError( - error_code="internal_server_error", + error_code=error_code, message=f"{exception.__class__.__name__}", sentry_link=sentry_link, - status_code=500, + status_code=status_code, ) return make_response(jsonify(api_exception), api_exception.status_code) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index 1793aab64..5c9c47082 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -11,7 +11,6 @@ from flask import current_app from flask import redirect from werkzeug.wrappers import Response -from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.refresh_token import RefreshTokenModel @@ -20,7 +19,21 @@ class MissingAccessTokenError(Exception): """MissingAccessTokenError.""" +class NotAuthorizedError(Exception): + pass + + +class RefreshTokenStorageError(Exception): + pass + + +class UserNotLoggedInError(Exception): + pass + + # These could be either 'id' OR 'access' tokens and we can't always know which + + class TokenExpiredError(Exception): """TokenExpiredError.""" @@ -29,6 +42,10 @@ class TokenInvalidError(Exception): """TokenInvalidError.""" +class TokenNotProvidedError(Exception): + pass + + class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" @@ -183,9 +200,8 @@ class AuthenticationService: db.session.commit() except Exception as e: db.session.rollback() - raise ApiError( - error_code="store_refresh_token_error", - message=f"We could not store the refresh token. Original error is {e}", + raise RefreshTokenStorageError( + f"We could not store the refresh token. 
Original error is {e}", ) from e @staticmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 19f9f418b..a72effd46 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -21,7 +21,6 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from sqlalchemy import or_ from sqlalchemy import text -from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel @@ -34,6 +33,11 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint +from spiffworkflow_backend.services.authentication_service import NotAuthorizedError +from spiffworkflow_backend.services.authentication_service import TokenExpiredError +from spiffworkflow_backend.services.authentication_service import TokenInvalidError +from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError +from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError from spiffworkflow_backend.services.group_service import GroupService from spiffworkflow_backend.services.user_service import UserService @@ -98,20 +102,16 @@ class AuthorizationService: def verify_sha256_token(cls, auth_header: Optional[str]) -> None: """Verify_sha256_token.""" if auth_header is None: - raise ApiError( - error_code="unauthorized", - message="", - status_code=403, + raise TokenNotProvidedError( + "unauthorized", ) received_sign = auth_header.split("sha256=")[-1].strip() secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode() expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest() if not compare_digest(received_sign, expected_sign): - raise ApiError( - error_code="unauthorized", - message="", - status_code=403, + raise TokenInvalidError( + "unauthorized", ) @classmethod @@ -393,10 +393,8 @@ class AuthorizationService: authorization_exclusion_list = ["permissions_check"] if not hasattr(g, "user"): - raise ApiError( - error_code="user_not_logged_in", - message="User is not logged in. Please log in", - status_code=401, + raise UserNotLoggedInError( + "User is not logged in. 
Please log in", ) api_view_function = current_app.view_functions[request.endpoint] @@ -416,13 +414,11 @@ class AuthorizationService: if has_permission: return None - raise ApiError( - error_code="unauthorized", - message=( + raise NotAuthorizedError( + ( f"User {g.user.username} is not authorized to perform requested action:" f" {permission_string} - {request.path}" ), - status_code=403, ) @staticmethod @@ -440,13 +436,11 @@ class AuthorizationService: payload = jwt.decode(auth_token, options={"verify_signature": False}) return payload except jwt.ExpiredSignatureError as exception: - raise ApiError( - "token_expired", + raise TokenExpiredError( "The Authentication token you provided expired and must be renewed.", ) from exception except jwt.InvalidTokenError as exception: - raise ApiError( - "token_invalid", + raise TokenInvalidError( ( "The Authentication token you provided is invalid. You need a new" " token. " diff --git a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx index 79ab8253e..c82157d7d 100644 --- a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx +++ b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx @@ -35,7 +35,7 @@ export default function ProcessGroupForm({ }; const hasValidIdentifier = (identifierToCheck: string) => { - return identifierToCheck.match(/^[a-z0-9][0-9a-z-]+[a-z0-9]$/); + return identifierToCheck.match(/^[a-z0-9][0-9a-z-]*[a-z0-9]$/); }; const handleFormSubmission = (event: any) => { From 02855719b8100969a395d0acf7f2537a8c69646a Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 20:59:28 -0500 Subject: [PATCH 23/59] Squashed 'SpiffWorkflow/' changes from 98c6294f..0e61be85 0e61be85 Merge pull request #289 from sartography/improvement/execution-and-serialization-cleanup 527684da fix some typos in the class & method docs 0dff44a4 Merge branch 'main' into improvement/execution-and-serialization-cleanup 64737498 Allow for other PythonScriptEngine environments besides task data (#288) dd63e916 remove some unused tests & diagrams 24aae519 clean up various small stuff 3b2dc35d use context when opening files for parsing 69eec3eb update class/method docs 24528dfb move all spec conversion classes to top level 5af33b11 remove some unused methods related to old serializer 931b90fb reorganize serializer 4e81ed29 consolidate pointless serializer classes d62acf02 change task_spec._update_hook to return a boolean indicating whether the task is ready git-subtree-dir: SpiffWorkflow git-subtree-split: 0e61be85c47474a33037e6f398e64c96e02f13ad --- SpiffWorkflow/bpmn/FeelLikeScriptEngine.py | 4 +- SpiffWorkflow/bpmn/PythonScriptEngine.py | 121 +- .../bpmn/PythonScriptEngineEnvironment.py | 122 ++ SpiffWorkflow/bpmn/parser/BpmnParser.py | 5 +- .../bpmn/serializer/bpmn_converters.py | 348 ----- .../bpmn/serializer/event_definition.py | 127 ++ .../bpmn/serializer/helpers/__init__.py | 0 .../serializer/{ => helpers}/dictionary.py | 16 +- .../bpmn/serializer/helpers/registry.py | 33 + SpiffWorkflow/bpmn/serializer/helpers/spec.py | 248 ++++ ...flow_spec_converter.py => process_spec.py} | 78 +- SpiffWorkflow/bpmn/serializer/task_spec.py | 292 ++++ .../bpmn/serializer/task_spec_converters.py | 323 ----- SpiffWorkflow/bpmn/serializer/workflow.py | 96 +- SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py | 54 - SpiffWorkflow/bpmn/specs/SubWorkflowTask.py | 17 +- .../bpmn/specs/events/IntermediateEvent.py | 7 +- .../bpmn/specs/events/event_types.py | 14 +- SpiffWorkflow/bpmn/workflow.py | 3 + 
SpiffWorkflow/camunda/serializer/config.py | 15 + .../camunda/serializer/event_definition.py | 20 + SpiffWorkflow/camunda/serializer/task_spec.py | 34 + .../serializer/task_spec_converters.py | 90 -- SpiffWorkflow/dmn/parser/BpmnDmnParser.py | 5 +- .../{task_spec_converters.py => task_spec.py} | 8 +- SpiffWorkflow/specs/AcquireMutex.py | 6 +- SpiffWorkflow/specs/Celery.py | 4 +- SpiffWorkflow/specs/Execute.py | 4 +- SpiffWorkflow/specs/Gate.py | 2 +- SpiffWorkflow/specs/Join.py | 26 +- SpiffWorkflow/specs/SubWorkflow.py | 2 +- SpiffWorkflow/specs/Transform.py | 2 +- SpiffWorkflow/specs/base.py | 23 +- SpiffWorkflow/spiff/event_definition.py | 0 SpiffWorkflow/spiff/serializer/config.py | 65 + .../spiff/serializer/event_definition.py | 20 + SpiffWorkflow/spiff/serializer/task_spec.py | 115 ++ .../spiff/serializer/task_spec_converters.py | 170 --- SpiffWorkflow/task.py | 15 +- tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py | 2 +- .../SpiffWorkflow/bpmn/BpmnLoaderForTests.py | 7 +- .../bpmn/BpmnWorkflowTestCase.py | 7 +- tests/SpiffWorkflow/bpmn/CustomScriptTest.py | 5 +- .../bpmn/FeelExpressionEngineTest.py | 5 +- .../SpiffWorkflow/bpmn/NavListMulipleEnds.py | 47 - .../bpmn/PythonScriptEngineEnvironmentTest.py | 80 ++ tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py | 7 +- .../bpmn/data/ComplexNavigation.bpmn | 746 ---------- ...ExclusiveGatewayMultipleEndNavigation.bpmn | 143 -- .../bpmn/data/NavLeapFrogLong.bpmn | 1209 ----------------- .../bpmn/data/invalid_process_sub.bpmn | 39 - tests/SpiffWorkflow/bpmn/data/rrt.bpmn | 336 ----- .../bpmn/data/serialization/v1.0.json | 6 +- .../bpmn/data/task_data_size.bpmn | 81 ++ .../data/timer_event_changes_last_task.bpmn | 77 -- .../bpmn/events/EventBasedGatewayTest.py | 5 +- .../bpmn/events/TimerCycleStartTest.py | 7 +- .../bpmn/events/TimerCycleTest.py | 7 +- .../bpmn/events/TimerDateTest.py | 5 +- .../events/TimerDurationBoundaryOnTaskTest.py | 3 +- .../bpmn/events/TimerDurationTest.py | 3 +- .../bpmn/serializer/BaseTestCase.py | 3 +- .../serializer/BpmnWorkflowSerializerTest.py | 3 +- .../bpmn/serializer/VersionMigrationTest.py | 6 +- tests/SpiffWorkflow/camunda/BaseTestCase.py | 12 +- .../camunda/CallActivityMessageTest.py | 2 +- .../camunda/DMNCustomScriptTest.py | 5 +- .../camunda/MessageBoundaryEventTest.py | 3 +- .../camunda/MultiInstanceDMNTest.py | 10 +- .../camunda/data/exclusive_gateway_pmi.bpmn | 94 -- .../camunda/data/random_fact.svg | 4 - .../camunda/data/top_workflow.bpmn | 64 - .../camunda/specs/UserTaskSpecTest.py | 5 +- tests/SpiffWorkflow/dmn/DecisionRunner.py | 2 +- tests/SpiffWorkflow/dmn/HitPolicyTest.py | 13 +- .../dmn/feel_engine/FeelDictDecisionTest.py | 4 +- .../FeelDictDotNotationDecisionTest.py | 2 +- .../DictDotNotationDecisionTest.py | 2 +- ...tDotNotationDecisionWeirdCharactersTest.py | 2 +- .../dmn/python_engine/PythonDecisionRunner.py | 5 +- tests/SpiffWorkflow/spiff/BaseTestCase.py | 22 +- 81 files changed, 1479 insertions(+), 4145 deletions(-) create mode 100644 SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py delete mode 100644 SpiffWorkflow/bpmn/serializer/bpmn_converters.py create mode 100644 SpiffWorkflow/bpmn/serializer/event_definition.py create mode 100644 SpiffWorkflow/bpmn/serializer/helpers/__init__.py rename SpiffWorkflow/bpmn/serializer/{ => helpers}/dictionary.py (91%) create mode 100644 SpiffWorkflow/bpmn/serializer/helpers/registry.py create mode 100644 SpiffWorkflow/bpmn/serializer/helpers/spec.py rename SpiffWorkflow/bpmn/serializer/{workflow_spec_converter.py => process_spec.py} (67%) create mode 
100644 SpiffWorkflow/bpmn/serializer/task_spec.py delete mode 100644 SpiffWorkflow/bpmn/serializer/task_spec_converters.py create mode 100644 SpiffWorkflow/camunda/serializer/config.py create mode 100644 SpiffWorkflow/camunda/serializer/event_definition.py create mode 100644 SpiffWorkflow/camunda/serializer/task_spec.py delete mode 100644 SpiffWorkflow/camunda/serializer/task_spec_converters.py rename SpiffWorkflow/dmn/serializer/{task_spec_converters.py => task_spec.py} (93%) create mode 100644 SpiffWorkflow/spiff/event_definition.py create mode 100644 SpiffWorkflow/spiff/serializer/config.py create mode 100644 SpiffWorkflow/spiff/serializer/event_definition.py create mode 100644 SpiffWorkflow/spiff/serializer/task_spec.py delete mode 100644 SpiffWorkflow/spiff/serializer/task_spec_converters.py delete mode 100644 tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py create mode 100644 tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py delete mode 100644 tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/rrt.bpmn create mode 100644 tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn delete mode 100644 tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn delete mode 100644 tests/SpiffWorkflow/camunda/data/random_fact.svg delete mode 100644 tests/SpiffWorkflow/camunda/data/top_workflow.bpmn diff --git a/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py b/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py index fe10c5501..a4db53082 100644 --- a/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py +++ b/SpiffWorkflow/bpmn/FeelLikeScriptEngine.py @@ -266,8 +266,8 @@ class FeelLikeScriptEngine(PythonScriptEngine): provide a specialised subclass that parses and executes the scripts / expressions in a mini-language of your own. 
""" - def __init__(self): - super().__init__() + def __init__(self, environment=None): + super().__init__(environment=environment) def validate(self, expression): super().validate(self.patch_expression(expression)) diff --git a/SpiffWorkflow/bpmn/PythonScriptEngine.py b/SpiffWorkflow/bpmn/PythonScriptEngine.py index 38279a1da..882d76bd1 100644 --- a/SpiffWorkflow/bpmn/PythonScriptEngine.py +++ b/SpiffWorkflow/bpmn/PythonScriptEngine.py @@ -3,7 +3,9 @@ import ast import copy import sys import traceback +import warnings +from .PythonScriptEngineEnvironment import TaskDataEnvironment from ..exceptions import SpiffWorkflowException, WorkflowTaskException from ..operators import Operator @@ -26,66 +28,6 @@ from ..operators import Operator # 02110-1301 USA -class Box(dict): - """ - Example: - m = Box({'first_name': 'Eduardo'}, last_name='Pool', age=24, sports=['Soccer']) - """ - - def __init__(self, *args, **kwargs): - super(Box, self).__init__(*args, **kwargs) - for arg in args: - if isinstance(arg, dict): - for k, v in arg.items(): - if isinstance(v, dict): - self[k] = Box(v) - else: - self[k] = v - - if kwargs: - for k, v in kwargs.items(): - if isinstance(v, dict): - self[k] = Box(v) - else: - self[k] = v - - def __deepcopy__(self, memodict=None): - if memodict is None: - memodict = {} - my_copy = Box() - for k, v in self.items(): - my_copy[k] = copy.deepcopy(v) - return my_copy - - def __getattr__(self, attr): - try: - output = self[attr] - except: - raise AttributeError( - "Dictionary has no attribute '%s' " % str(attr)) - return output - - def __setattr__(self, key, value): - self.__setitem__(key, value) - - def __setitem__(self, key, value): - super(Box, self).__setitem__(key, value) - self.__dict__.update({key: value}) - - def __getstate__(self): - return self.__dict__ - - def __setstate__(self, state): - self.__init__(state) - - def __delattr__(self, item): - self.__delitem__(item) - - def __delitem__(self, key): - super(Box, self).__delitem__(key) - del self.__dict__[key] - - class PythonScriptEngine(object): """ This should serve as a base for all scripting & expression evaluation @@ -97,10 +39,18 @@ class PythonScriptEngine(object): expressions in a different way. """ - def __init__(self, default_globals=None, scripting_additions=None): - - self.globals = default_globals or {} - self.globals.update(scripting_additions or {}) + def __init__(self, default_globals=None, scripting_additions=None, environment=None): + if default_globals is not None or scripting_additions is not None: + warnings.warn(f'default_globals and scripting_additions are deprecated. ' + f'Please provide an environment such as TaskDataEnvironment', + DeprecationWarning, stacklevel=2) + if environment is None: + environment_globals = {} + environment_globals.update(default_globals or {}) + environment_globals.update(scripting_additions or {}) + self.environment = TaskDataEnvironment(environment_globals) + else: + self.environment = environment self.error_tasks = {} def validate(self, expression): @@ -175,7 +125,7 @@ class PythonScriptEngine(object): same name as a pre-defined script, rendering the script un-callable. This results in a nearly indecipherable error. 
Better to fail fast with a sensible error message.""" - func_overwrites = set(self.globals).intersection(task.data) + func_overwrites = set(self.environment.globals).intersection(task.data) func_overwrites.update(set(external_methods).intersection(task.data)) if len(func_overwrites) > 0: msg = f"You have task data that overwrites a predefined " \ @@ -183,45 +133,8 @@ class PythonScriptEngine(object): f"field name(s) to something else: {func_overwrites}" raise WorkflowTaskException(msg, task=task) - def convert_to_box(self, data): - if isinstance(data, dict): - for key, value in data.items(): - if not isinstance(value, Box): - data[key] = self.convert_to_box(value) - return Box(data) - if isinstance(data, list): - for idx, value in enumerate(data): - data[idx] = self.convert_to_box(value) - return data - return data - def _evaluate(self, expression, context, external_methods=None): - - globals = copy.copy(self.globals) # else we pollute all later evals. - self.convert_to_box(context) - globals.update(external_methods or {}) - globals.update(context) - return eval(expression, globals) + return self.environment.evaluate(expression, context, external_methods) def _execute(self, script, context, external_methods=None): - - my_globals = copy.copy(self.globals) - self.convert_to_box(context) - my_globals.update(external_methods or {}) - context.update(my_globals) - try: - exec(script, context) - finally: - self.remove_globals_and_functions_from_context(context, - external_methods) - - def remove_globals_and_functions_from_context(self, context, - external_methods=None): - """When executing a script, don't leave the globals, functions - and external methods in the context that we have modified.""" - for k in list(context): - if k == "__builtins__" or \ - hasattr(context[k], '__call__') or \ - k in self.globals or \ - external_methods and k in external_methods: - context.pop(k) + self.environment.execute(script, context, external_methods) diff --git a/SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py b/SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py new file mode 100644 index 000000000..e09beb7c1 --- /dev/null +++ b/SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py @@ -0,0 +1,122 @@ +import copy +import warnings + +class BasePythonScriptEngineEnvironment: + def __init__(self, environment_globals=None): + self.globals = environment_globals or {} + + def evaluate(self, expression, context, external_methods=None): + raise NotImplementedError("Subclass must implement this method") + + def execute(self, script, context, external_methods=None): + raise NotImplementedError("Subclass must implement this method") + +class TaskDataEnvironment(BasePythonScriptEngineEnvironment): + def evaluate(self, expression, context, external_methods=None): + my_globals = copy.copy(self.globals) # else we pollute all later evals. 
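+        # for example, evaluate("a + 1", {"a": 1}) merges self.globals, any
+        # external_methods, and the task data into my_globals and returns 2.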
+ self._prepare_context(context) + my_globals.update(external_methods or {}) + my_globals.update(context) + return eval(expression, my_globals) + + def execute(self, script, context, external_methods=None): + my_globals = copy.copy(self.globals) + self._prepare_context(context) + my_globals.update(external_methods or {}) + context.update(my_globals) + try: + exec(script, context) + finally: + self._remove_globals_and_functions_from_context(context, external_methods) + + def _prepare_context(self, context): + pass + + def _remove_globals_and_functions_from_context(self, context, + external_methods=None): + """When executing a script, don't leave the globals, functions + and external methods in the context that we have modified.""" + for k in list(context): + if k == "__builtins__" or \ + hasattr(context[k], '__call__') or \ + k in self.globals or \ + external_methods and k in external_methods: + context.pop(k) + +class Box(dict): + """ + Example: + m = Box({'first_name': 'Eduardo'}, last_name='Pool', age=24, sports=['Soccer']) + """ + + def __init__(self, *args, **kwargs): + warnings.warn('The usage of Box has been deprecated.', DeprecationWarning, stacklevel=2) + super(Box, self).__init__(*args, **kwargs) + for arg in args: + if isinstance(arg, dict): + for k, v in arg.items(): + if isinstance(v, dict): + self[k] = Box(v) + else: + self[k] = v + + if kwargs: + for k, v in kwargs.items(): + if isinstance(v, dict): + self[k] = Box(v) + else: + self[k] = v + + def __deepcopy__(self, memodict=None): + if memodict is None: + memodict = {} + my_copy = Box() + for k, v in self.items(): + my_copy[k] = copy.deepcopy(v) + return my_copy + + def __getattr__(self, attr): + try: + output = self[attr] + except: + raise AttributeError( + "Dictionary has no attribute '%s' " % str(attr)) + return output + + def __setattr__(self, key, value): + self.__setitem__(key, value) + + def __setitem__(self, key, value): + super(Box, self).__setitem__(key, value) + self.__dict__.update({key: value}) + + def __getstate__(self): + return self.__dict__ + + def __setstate__(self, state): + self.__init__(state) + + def __delattr__(self, item): + self.__delitem__(item) + + def __delitem__(self, key): + super(Box, self).__delitem__(key) + del self.__dict__[key] + + @classmethod + def convert_to_box(cls, data): + if isinstance(data, dict): + for key, value in data.items(): + if not isinstance(value, Box): + data[key] = cls.convert_to_box(value) + return Box(data) + if isinstance(data, list): + for idx, value in enumerate(data): + data[idx] = cls.convert_to_box(value) + return data + return data + +class BoxedTaskDataEnvironment(TaskDataEnvironment): + def _prepare_context(self, context): + Box.convert_to_box(context) + diff --git a/SpiffWorkflow/bpmn/parser/BpmnParser.py b/SpiffWorkflow/bpmn/parser/BpmnParser.py index d74c6651c..6b98bb8a9 100644 --- a/SpiffWorkflow/bpmn/parser/BpmnParser.py +++ b/SpiffWorkflow/bpmn/parser/BpmnParser.py @@ -179,11 +179,8 @@ class BpmnParser(object): Add all filenames in the given list to the parser's set. 
""" for filename in filenames: - f = open(filename, 'r') - try: + with open(filename, 'r') as f: self.add_bpmn_xml(etree.parse(f), filename=filename) - finally: - f.close() def add_bpmn_xml(self, bpmn, filename=None): """ diff --git a/SpiffWorkflow/bpmn/serializer/bpmn_converters.py b/SpiffWorkflow/bpmn/serializer/bpmn_converters.py deleted file mode 100644 index c5e2e5c20..000000000 --- a/SpiffWorkflow/bpmn/serializer/bpmn_converters.py +++ /dev/null @@ -1,348 +0,0 @@ -from functools import partial - -from uuid import UUID -from datetime import datetime, timedelta - -from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnDataSpecification - -from .dictionary import DictionaryConverter - -from ..specs.events.event_definitions import ( - NoneEventDefinition, - MultipleEventDefinition, - SignalEventDefinition, - MessageEventDefinition, - CorrelationProperty, - TimeDateEventDefinition, - DurationTimerEventDefinition, - CycleTimerEventDefinition, - ErrorEventDefinition, - EscalationEventDefinition, - CancelEventDefinition, - TerminateEventDefinition, - NamedEventDefinition -) - -from ..specs.BpmnSpecMixin import BpmnSpecMixin -from ...operators import Attrib, PathAttrib - - -class BpmnDataConverter(DictionaryConverter): - """ - The default converter for task and workflow data. It allows some commonly used python objects - to be converted to a form that can be serialized with JSOM - - It also serves as a simple example for anyone who needs custom data serialization. If you have - custom objects or python objects not included here in your workflow/task data, then you should - replace or extend this with one that can handle the contents of your workflow. - """ - def __init__(self): - - super().__init__() - self.register(UUID, lambda v: { 'value': str(v) }, lambda v: UUID(v['value'])) - self.register(datetime, lambda v: { 'value': v.isoformat() }, lambda v: datetime.fromisoformat(v['value'])) - self.register(timedelta, lambda v: { 'days': v.days, 'seconds': v.seconds }, lambda v: timedelta(**v)) - - def convert(self, obj): - self.clean(obj) - return super().convert(obj) - - def clean(self, obj): - # This removes functions and other callables from task data. - # By default we don't want to serialize these - if isinstance(obj, dict): - items = [ (k, v) for k, v in obj.items() ] - for key, value in items: - if callable(value): - del obj[key] - -class BpmnDataSpecificationConverter: - - @staticmethod - def to_dict(data_spec): - return { 'name': data_spec.name, 'description': data_spec.description } - - @staticmethod - def from_dict(dct): - return BpmnDataSpecification(**dct) - - - -class BpmnTaskSpecConverter(DictionaryConverter): - """ - This the base Task Spec Converter. - - It contains methods for parsing generic and BPMN task spec attributes. - - If you have extended any of the the BPMN tasks with custom functionality, you'll need to - implement a converter for those task spec types. You'll need to implement the `to_dict` and - `from_dict` methods on any inheriting classes. - - The default task spec converters are in `task_converters`; the `camunda` and `dmn` - serialization packages contain other examples. - """ - - def __init__(self, spec_class, data_converter, typename=None): - """The default task spec converter. This will generally be registered with a workflow - spec converter. - - Task specs can contain arbitrary data, though none of the default BPMN tasks do. We - may remove this functionality in the future. 
Therefore, the data_converter can be - `None`; if this is the case, task spec attributes that can contain arbitrary data will be - ignored. - - :param spec_class: the class defining the task type - :param data_converter: a converter for custom data (can be None) - :param typename: an optional typename for the object registration - """ - super().__init__() - self.spec_class = spec_class - self.data_converter = data_converter - self.typename = typename if typename is not None else spec_class.__name__ - - event_definitions = [ - NoneEventDefinition, - CancelEventDefinition, - TerminateEventDefinition, - SignalEventDefinition, - MessageEventDefinition, - ErrorEventDefinition, - EscalationEventDefinition, - TimeDateEventDefinition, - DurationTimerEventDefinition, - CycleTimerEventDefinition, - MultipleEventDefinition - ] - - for event_definition in event_definitions: - self.register( - event_definition, - self.event_definition_to_dict, - partial(self.event_defintion_from_dict, event_definition) - ) - - self.register(Attrib, self.attrib_to_dict, partial(self.attrib_from_dict, Attrib)) - self.register(PathAttrib, self.attrib_to_dict, partial(self.attrib_from_dict, PathAttrib)) - self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict) - - def to_dict(self, spec): - """ - The convert method that will be called when a Task Spec Converter is registered with a - Workflow Spec Converter. - """ - raise NotImplementedError - - def from_dict(self, dct): - """ - The restore method that will be called when a Task Spec Converter is registered with a - Workflow Spec Converter. - """ - raise NotImplementedError - - def get_default_attributes(self, spec): - """Extracts the default Spiff attributes from a task spec. - - :param spec: the task spec to be converted - - Returns: - a dictionary of standard task spec attributes - """ - dct = { - 'id': spec.id, - 'name': spec.name, - 'description': spec.description, - 'manual': spec.manual, - 'internal': spec.internal, - 'lookahead': spec.lookahead, - 'inputs': [task.name for task in spec.inputs], - 'outputs': [task.name for task in spec.outputs], - } - # This stuff is also all defined in the base task spec, but can contain data, so we need - # our data serializer. I think we should try to get this stuff out of the base task spec. - if self.data_converter is not None: - dct['data'] = self.data_converter.convert(spec.data) - dct['defines'] = self.data_converter.convert(spec.defines) - dct['pre_assign'] = self.data_converter.convert(spec.pre_assign) - dct['post_assign'] = self.data_converter.convert(spec.post_assign) - - return dct - - def get_bpmn_attributes(self, spec): - """Extracts the attributes added by the `BpmnSpecMixin` class. - - :param spec: the task spec to be converted - - Returns: - a dictionary of BPMN task spec attributes - """ - return { - 'lane': spec.lane, - 'documentation': spec.documentation, - 'loopTask': spec.loopTask, - 'position': spec.position, - 'data_input_associations': [ self.convert(obj) for obj in spec.data_input_associations ], - 'data_output_associations': [ self.convert(obj) for obj in spec.data_output_associations ], - } - - def get_join_attributes(self, spec): - """Extracts attributes for task specs that inherit from `Join`. 
- - :param spec: the task spec to be converted - - Returns: - a dictionary of `Join` task spec attributes - """ - return { - 'split_task': spec.split_task, - 'threshold': spec.threshold, - 'cancel': spec.cancel_remaining, - } - - def get_subworkflow_attributes(self, spec): - """Extracts attributes for task specs that inherit from `SubWorkflowTask`. - - :param spec: the task spec to be converted - - Returns: - a dictionary of subworkflow task spec attributes - """ - return {'spec': spec.spec} - - def task_spec_from_dict(self, dct): - """ - Creates a task spec based on the supplied dictionary. It handles setting the default - task spec attributes as well as attributes added by `BpmnSpecMixin`. - - :param dct: the dictionary to create the task spec from - - Returns: - a restored task spec - """ - internal = dct.pop('internal') - inputs = dct.pop('inputs') - outputs = dct.pop('outputs') - - spec = self.spec_class(**dct) - spec.internal = internal - spec.inputs = inputs - spec.outputs = outputs - spec.id = dct['id'] - - if self.data_converter is not None: - spec.data = self.data_converter.restore(dct.get('data', {})) - spec.defines = self.data_converter.restore(dct.get('defines', {})) - spec.pre_assign = self.data_converter.restore(dct.get('pre_assign', {})) - spec.post_assign = self.data_converter.restore(dct.get('post_assign', {})) - - if isinstance(spec, BpmnSpecMixin): - spec.documentation = dct.pop('documentation', None) - spec.lane = dct.pop('lane', None) - spec.loopTask = dct.pop('loopTask', False) - spec.data_input_associations = self.restore(dct.pop('data_input_associations', [])) - spec.data_output_associations = self.restore(dct.pop('data_output_associations', [])) - - return spec - - def event_definition_to_dict(self, event_definition): - """ - Converts an BPMN event definition to a dict. It will not typically be called directly, - but via `convert` and will convert any event type supported by Spiff. - - :param event_definition: the event_definition to be converted. - - Returns: - a dictionary representation of an event definition - """ - dct = {'internal': event_definition.internal, 'external': event_definition.external} - - if isinstance(event_definition, NamedEventDefinition): - dct['name'] = event_definition.name - if isinstance(event_definition, MessageEventDefinition): - dct['correlation_properties'] = [prop.__dict__ for prop in event_definition.correlation_properties] - if isinstance(event_definition, (TimeDateEventDefinition, DurationTimerEventDefinition, CycleTimerEventDefinition)): - dct['name'] = event_definition.name - dct['expression'] = event_definition.expression - if isinstance(event_definition, ErrorEventDefinition): - dct['error_code'] = event_definition.error_code - if isinstance(event_definition, EscalationEventDefinition): - dct['escalation_code'] = event_definition.escalation_code - if isinstance(event_definition, MultipleEventDefinition): - dct['event_definitions'] = [self.convert(e) for e in event_definition.event_definitions] - dct['parallel'] = event_definition.parallel - - return dct - - def event_defintion_from_dict(self, definition_class, dct): - """Restores an event definition. It will not typically be called directly, but via - `restore` and will restore any BPMN event type supporred by Spiff. 
- - :param definition_class: the class that will be used to create the object - :param dct: the event definition attributes - - Returns: - an `EventDefinition` object - """ - internal, external = dct.pop('internal'), dct.pop('external') - if 'correlation_properties' in dct: - dct['correlation_properties'] = [CorrelationProperty(**prop) for prop in dct['correlation_properties']] - if 'event_definitions' in dct: - dct['event_definitions'] = [self.restore(d) for d in dct['event_definitions']] - event_definition = definition_class(**dct) - event_definition.internal = internal - event_definition.external = external - return event_definition - - def attrib_to_dict(self, attrib): - return { 'name': attrib.name } - - def attrib_from_dict(self, attrib_class, dct): - return attrib_class(dct['name']) - - -class BpmnWorkflowSpecConverter(DictionaryConverter): - """ - This is the base converter for a BPMN workflow spec. - - It will register converters for the task spec types contained in the workflow, as well as - the workflow spec class itself. - - This class can be extended if you implement a custom workflow spec type. See the converter - in `workflow_spec_converter` for an example. - """ - - def __init__(self, spec_class, task_spec_converters, data_converter=None): - """ - Converter for a BPMN workflow spec class. - - The `to_dict` and `from_dict` methods of the given task spec converter classes will - be registered, so that they can be restored automatically. - - The data_converter applied to task *spec* data, not task data, and may be `None`. See - `BpmnTaskSpecConverter` for more discussion. - - :param spec_class: the workflow spec class - :param task_spec_converters: a list of `BpmnTaskSpecConverter` classes - :param data_converter: an optional data converter - """ - super().__init__() - self.spec_class = spec_class - self.data_converter = data_converter - - self.register(spec_class, self.to_dict, self.from_dict) - for converter in task_spec_converters: - self.register(converter.spec_class, converter.to_dict, converter.from_dict, converter.typename) - self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict) - - def to_dict(self, spec): - """ - The convert method that will be called when a Workflow Spec Converter is registered with a - Workflow Converter. - """ - raise NotImplementedError - - def from_dict(self, dct): - """ - The restore method that will be called when a Workflow Spec Converter is registered with a - Workflow Converter. 
- """ - raise NotImplementedError diff --git a/SpiffWorkflow/bpmn/serializer/event_definition.py b/SpiffWorkflow/bpmn/serializer/event_definition.py new file mode 100644 index 000000000..35879688e --- /dev/null +++ b/SpiffWorkflow/bpmn/serializer/event_definition.py @@ -0,0 +1,127 @@ +from .helpers.spec import EventDefinitionConverter + +from ..specs.events.event_definitions import ( + CancelEventDefinition, + ErrorEventDefinition, + EscalationEventDefinition, + MessageEventDefinition, + NoneEventDefinition, + SignalEventDefinition, + TerminateEventDefinition, + TimeDateEventDefinition, + DurationTimerEventDefinition, + CycleTimerEventDefinition, + MultipleEventDefinition, +) + +class CancelEventDefinitionConverter(EventDefinitionConverter): + def __init__(self, registry): + super().__init__(CancelEventDefinition, registry) + + +class ErrorEventDefinitionConverter(EventDefinitionConverter): + + def __init__(self, registry): + super().__init__(ErrorEventDefinition, registry) + + def to_dict(self, event_definition): + dct = super().to_dict(event_definition) + dct['error_code'] = event_definition.error_code + return dct + + +class EscalationEventDefinitionConverter(EventDefinitionConverter): + + def __init__(self, registry): + super().__init__(EscalationEventDefinition, registry) + + def to_dict(self, event_definition): + dct = super().to_dict(event_definition) + dct['escalation_code'] = event_definition.escalation_code + return dct + + +class MessageEventDefinitionConverter(EventDefinitionConverter): + + def __init__(self, registry): + super().__init__(MessageEventDefinition, registry) + + def to_dict(self, event_definition): + dct = super().to_dict(event_definition) + dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties) + return dct + + def from_dict(self, dct): + dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties']) + event_definition = super().from_dict(dct) + return event_definition + + +class NoneEventDefinitionConverter(EventDefinitionConverter): + def __init__(self, registry): + super().__init__(NoneEventDefinition, registry) + + +class SignalEventDefinitionConverter(EventDefinitionConverter): + def __init__(self, registry): + super().__init__(SignalEventDefinition, registry) + + +class TerminateEventDefinitionConverter(EventDefinitionConverter): + def __init__(self, registry): + super().__init__(TerminateEventDefinition, registry) + + +class TimerEventDefinitionConverter(EventDefinitionConverter): + + def to_dict(self, event_definition): + dct = super().to_dict(event_definition) + dct['expression'] = event_definition.expression + return dct + +class TimeDateEventDefinitionConverter(TimerEventDefinitionConverter): + def __init__(self, registry): + super().__init__(TimeDateEventDefinition, registry) + + +class DurationTimerEventDefinitionConverter(TimerEventDefinitionConverter): + def __init__(self, registry): + super().__init__(DurationTimerEventDefinition, registry) + + +class CycleTimerEventDefinitionConverter(TimerEventDefinitionConverter): + def __init__(self, registry): + super().__init__(CycleTimerEventDefinition, registry) + + +class MultipleEventDefinitionConverter(EventDefinitionConverter): + + def __init__(self, registry): + super().__init__(MultipleEventDefinition, registry) + + def to_dict(self, event_definition): + dct = super().to_dict(event_definition) + dct['parallel'] = event_definition.parallel + dct['event_definitions'] = [self.registry.convert(e) for e in 
event_definition.event_definitions]
+        return dct
+
+    def from_dict(self, dct):
+        events = dct.pop('event_definitions')
+        event_definition = super().from_dict(dct)
+        event_definition.event_definitions = [self.registry.restore(d) for d in events]
+        return event_definition
+
+
+DEFAULT_EVENT_CONVERTERS = [
+    CancelEventDefinitionConverter,
+    ErrorEventDefinitionConverter,
+    EscalationEventDefinitionConverter,
+    MessageEventDefinitionConverter,
+    NoneEventDefinitionConverter,
+    SignalEventDefinitionConverter,
+    TerminateEventDefinitionConverter,
+    TimeDateEventDefinitionConverter,
+    DurationTimerEventDefinitionConverter,
+    CycleTimerEventDefinitionConverter,
+    MultipleEventDefinitionConverter,
+]
\ No newline at end of file
diff --git a/SpiffWorkflow/bpmn/serializer/helpers/__init__.py b/SpiffWorkflow/bpmn/serializer/helpers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/SpiffWorkflow/bpmn/serializer/dictionary.py b/SpiffWorkflow/bpmn/serializer/helpers/dictionary.py
similarity index 91%
rename from SpiffWorkflow/bpmn/serializer/dictionary.py
rename to SpiffWorkflow/bpmn/serializer/helpers/dictionary.py
index 921ce1b25..b69788242 100644
--- a/SpiffWorkflow/bpmn/serializer/dictionary.py
+++ b/SpiffWorkflow/bpmn/serializer/helpers/dictionary.py
@@ -2,28 +2,28 @@ from functools import partial

 class DictionaryConverter:
     """
-    This is a base class used to convert BPMN specs, workflows, tasks, and data to
-    dictionaries of JSON-serializable objects.  Actual serialization is done as the
+    This is a base class used to convert BPMN specs, workflows, tasks, and (optionally)
+    data to dictionaries of JSON-serializable objects.  Actual serialization is done as the
     very last step by other classes.

-    This class allows you to register to_dict and from_dict functions for non-JSON-
+    This class allows you to register `to_dict` and `from_dict` functions for non-JSON-
     serializable objects.

-    When an object is passed into `convert`, it will call the supplied to_dict
+    When an object is passed into `convert`, it will call the supplied `to_dict`
     function on any classes that have been registered.  The supplied to_dict function
     must return a dictionary.  The object's `typename` will be added to this dictionary
     by the converter.

     The (unqualified) class name will be used as the `typename` if one is not supplied.
-    You can optionally supply our own names (you'll need to do this if you need to
-    identically named classes in multiple packages).
+    You can optionally supply your own names (you'll need to do this if you use identically
+    named classes in multiple packages).

     When a dictionary is passed into `restore`, it will be checked for a `typename` key.
-    If a registered `typename` is found, the supplied from_dict function will be
+    If a registered `typename` is found, the supplied `from_dict` function will be
     called.  Unrecognized objects will be returned as-is.

     For a simple example of how to use this class, see the `BpmnDataConverter` in
-    `bpmn_converters`.
+    `registry` (now named `DefaultRegistry`).
     """

     def __init__(self):
diff --git a/SpiffWorkflow/bpmn/serializer/helpers/registry.py b/SpiffWorkflow/bpmn/serializer/helpers/registry.py
new file mode 100644
index 000000000..3c5581b33
--- /dev/null
+++ b/SpiffWorkflow/bpmn/serializer/helpers/registry.py
@@ -0,0 +1,33 @@
+from uuid import UUID
+from datetime import datetime, timedelta
+
+from .dictionary import DictionaryConverter
+
+class DefaultRegistry(DictionaryConverter):
+    """
+    The default converter for task and workflow data.
It allows some commonly used python objects
+    to be converted to a form that can be serialized with JSON.
+
+    It also serves as a simple example for anyone who needs custom data serialization.  If you have
+    custom objects or python objects not included here in your workflow/task data, then you should
+    replace or extend this with one that can handle the contents of your workflow.
+    """
+    def __init__(self):
+
+        super().__init__()
+        self.register(UUID, lambda v: { 'value': str(v) }, lambda v: UUID(v['value']))
+        self.register(datetime, lambda v: { 'value': v.isoformat() }, lambda v: datetime.fromisoformat(v['value']))
+        self.register(timedelta, lambda v: { 'days': v.days, 'seconds': v.seconds }, lambda v: timedelta(**v))
+
+    def convert(self, obj):
+        self.clean(obj)
+        return super().convert(obj)
+
+    def clean(self, obj):
+        # This removes functions and other callables from task data.
+        # By default we don't want to serialize these
+        if isinstance(obj, dict):
+            items = [ (k, v) for k, v in obj.items() ]
+            for key, value in items:
+                if callable(value):
+                    del obj[key]
\ No newline at end of file
diff --git a/SpiffWorkflow/bpmn/serializer/helpers/spec.py b/SpiffWorkflow/bpmn/serializer/helpers/spec.py
new file mode 100644
index 000000000..1708698f2
--- /dev/null
+++ b/SpiffWorkflow/bpmn/serializer/helpers/spec.py
@@ -0,0 +1,248 @@
+from functools import partial
+
+from ...specs.BpmnSpecMixin import BpmnSpecMixin
+from ...specs.events.event_definitions import NamedEventDefinition, TimerEventDefinition
+from ...specs.events.event_definitions import CorrelationProperty
+from ....operators import Attrib, PathAttrib
+
+
+class BpmnSpecConverter:
+    """The base class for conversion of BPMN spec classes.
+
+    In general, most classes that extend this would simply take an existing registry as an
+    argument and automatically supply the class along with the implementations of the
+    conversion functions `to_dict` and `from_dict`.
+
+    The operation of the spec converter is a little opaque, but hopefully makes sense with a
+    little explanation.
+
+    The registry is a `DictionaryConverter` that registers conversion methods by class.  It can be
+    pre-populated with methods for custom data (though this is not required) and is passed into
+    each of these subclasses.  When a subclass of this one gets instantiated, it adds itself
+    to this registry.
+
+    This seems a little bit backwards -- the registry is using the subclass, so it seems like we
+    ought to pass the subclass to the registry.  However, there is a lot of interdependence across
+    the spec classes, so this doesn't work that well in practice -- most classes need to know about
+    all the other classes, and this was the most concise way I could think of to make that happen.
+
+    The goal is to be able to replace almost any spec class at the top level without requiring the
+    classes that use it to reimplement conversion mechanisms.  So for example, it is not necessary to
+    re-implement all event-based task spec conversions because, e.g., the
+    `MessageEventDefinition` was modified.
+    """
+    def __init__(self, spec_class, registry, typename=None):
+        """Constructor for a BPMN spec.
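+
+        Instantiating a subclass is all that is needed to register it, e.g. (a sketch
+        with a hypothetical custom spec class)::
+
+            class MyTaskConverter(TaskSpecConverter):
+                def __init__(self, registry):
+                    super().__init__(MyTask, registry)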
+
+        :param spec_class: the class of the spec the subclass provides conversions for
+        :param registry: a registry of conversions to which this one should be added
+        :param typename: the name of the class as it will appear in the serialization
+        """
+        self.spec_class = spec_class
+        self.registry = registry
+        self.typename = typename if typename is not None else spec_class.__name__
+        self.registry.register(spec_class, self.to_dict, self.from_dict, self.typename)
+
+    def to_dict(self, spec):
+        raise NotImplementedError
+
+    def from_dict(self, dct):
+        raise NotImplementedError
+
+
+class BpmnDataSpecificationConverter(BpmnSpecConverter):
+    """This is the base Data Spec converter.
+
+    Currently the only use is Data Objects.
+    """
+
+    def to_dict(self, data_spec):
+        return { 'name': data_spec.name, 'description': data_spec.description }
+
+    def from_dict(self, dct):
+        return self.spec_class(**dct)
+
+
+class EventDefinitionConverter(BpmnSpecConverter):
+    """This is the base Event Definition Converter.
+
+    It provides conversions for the great majority of BPMN events as-is, and contains
+    one custom method for serializing Correlation Properties (as Message Event Definitions
+    are likely to be the most commonly extended event definition spec).
+    """
+
+    def to_dict(self, event_definition):
+        dct = {'internal': event_definition.internal, 'external': event_definition.external}
+        if isinstance(event_definition, (NamedEventDefinition, TimerEventDefinition)):
+            dct['name'] = event_definition.name
+        return dct
+
+    def from_dict(self, dct):
+        internal, external = dct.pop('internal'), dct.pop('external')
+        event_definition = self.spec_class(**dct)
+        event_definition.internal = internal
+        event_definition.external = external
+        return event_definition
+
+    def correlation_properties_to_dict(self, props):
+        return [prop.__dict__ for prop in props]
+
+    def correlation_properties_from_dict(self, props):
+        return [CorrelationProperty(**prop) for prop in props]
+
+
+class TaskSpecConverter(BpmnSpecConverter):
+    """
+    This is the base Task Spec Converter.
+
+    It contains methods for parsing generic and BPMN task spec attributes.
+
+    If you have extended any of the BPMN tasks with custom functionality, you'll need to
+    implement a converter for those task spec types.  You'll need to implement the `to_dict` and
+    `from_dict` methods on any inheriting classes.
+
+    The default task spec converters are in the `task`, `process_spec`, and `event_definitions`
+    modules of this package; the `camunda`, `dmn`, and `spiff` serialization packages contain other
+    examples.
+    """
+    def get_default_attributes(self, spec, include_data=False):
+        """Extracts the default Spiff attributes from a task spec.
+
+        :param spec: the task spec to be converted
+        :param include_data: whether or not to include task spec data attributes
+
+        Returns:
+            a dictionary of standard task spec attributes
+        """
+        dct = {
+            'id': spec.id,
+            'name': spec.name,
+            'description': spec.description,
+            'manual': spec.manual,
+            'internal': spec.internal,
+            'lookahead': spec.lookahead,
+            'inputs': [task.name for task in spec.inputs],
+            'outputs': [task.name for task in spec.outputs],
+        }
+        # This stuff is also all defined in the base task spec, but can contain data, so we need
+        # our data serializer.  I think we should try to get this stuff out of the base task spec.
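+        # Spec data is only serialized when the caller explicitly requests it.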
+        if include_data:
+            dct['data'] = self.registry.convert(spec.data)
+            dct['defines'] = self.registry.convert(spec.defines)
+            dct['pre_assign'] = self.registry.convert(spec.pre_assign)
+            dct['post_assign'] = self.registry.convert(spec.post_assign)
+
+        return dct
+
+    def get_bpmn_attributes(self, spec):
+        """Extracts the attributes added by the `BpmnSpecMixin` class.
+
+        :param spec: the task spec to be converted
+
+        Returns:
+            a dictionary of BPMN task spec attributes
+        """
+        return {
+            'lane': spec.lane,
+            'documentation': spec.documentation,
+            'loopTask': spec.loopTask,
+            'position': spec.position,
+            'data_input_associations': [ self.registry.convert(obj) for obj in spec.data_input_associations ],
+            'data_output_associations': [ self.registry.convert(obj) for obj in spec.data_output_associations ],
+        }
+
+    def get_join_attributes(self, spec):
+        """Extracts attributes for task specs that inherit from `Join`.
+
+        :param spec: the task spec to be converted
+
+        Returns:
+            a dictionary of `Join` task spec attributes
+        """
+        return {
+            'split_task': spec.split_task,
+            'threshold': spec.threshold,
+            'cancel': spec.cancel_remaining,
+        }
+
+    def get_subworkflow_attributes(self, spec):
+        """Extracts attributes for task specs that inherit from `SubWorkflowTask`.
+
+        :param spec: the task spec to be converted
+
+        Returns:
+            a dictionary of subworkflow task spec attributes
+        """
+        return {'spec': spec.spec}
+
+    def task_spec_from_dict(self, dct, include_data=False):
+        """
+        Creates a task spec based on the supplied dictionary.  It handles setting the default
+        task spec attributes as well as attributes added by `BpmnSpecMixin`.
+
+        :param dct: the dictionary to create the task spec from
+        :param include_data: whether or not to include task spec data attributes
+
+        Returns:
+            a restored task spec
+        """
+        internal = dct.pop('internal')
+        inputs = dct.pop('inputs')
+        outputs = dct.pop('outputs')
+
+        spec = self.spec_class(**dct)
+        spec.internal = internal
+        spec.inputs = inputs
+        spec.outputs = outputs
+        spec.id = dct['id']
+
+        if include_data:
+            spec.data = self.registry.restore(dct.get('data', {}))
+            spec.defines = self.registry.restore(dct.get('defines', {}))
+            spec.pre_assign = self.registry.restore(dct.get('pre_assign', {}))
+            spec.post_assign = self.registry.restore(dct.get('post_assign', {}))
+
+        if isinstance(spec, BpmnSpecMixin):
+            spec.documentation = dct.pop('documentation', None)
+            spec.lane = dct.pop('lane', None)
+            spec.loopTask = dct.pop('loopTask', False)
+            spec.data_input_associations = self.registry.restore(dct.pop('data_input_associations', []))
+            spec.data_output_associations = self.registry.restore(dct.pop('data_output_associations', []))
+
+        return spec
+
+
+class WorkflowSpecConverter(BpmnSpecConverter):
+    """
+    This is the base converter for a BPMN workflow spec.
+
+    It will register converters for the task spec types contained in the workflow, as well as
+    the workflow spec class itself.
+
+    This class can be extended if you implement a custom workflow spec type.  See the converter
+    in `process_spec` for an example.
+    """
+
+    def __init__(self, spec_class, registry):
+        """
+        Converter for a BPMN workflow spec class.
+
+        The spec class will be registered with the provided registry, so that it can be
+        restored automatically.
+
+        The registry is applied to task *spec* data, not task data.  See
+        `TaskSpecConverter` for more discussion.
+
+        :param spec_class: the workflow spec class
+        :param registry: a registry of conversions to which this converter should be added
+        """
+        super().__init__(spec_class, registry)
+
+        # Leaving these as-is, as I can't imagine anyone would need or want to extend
+        self.registry.register(Attrib, self.attrib_to_dict, partial(self.attrib_from_dict, Attrib))
+        self.registry.register(PathAttrib, self.attrib_to_dict, partial(self.attrib_from_dict, PathAttrib))
+
+    def attrib_to_dict(self, attrib):
+        return { 'name': attrib.name }
+
+    def attrib_from_dict(self, attrib_class, dct):
+        return attrib_class(dct['name'])
\ No newline at end of file
diff --git a/SpiffWorkflow/bpmn/serializer/workflow_spec_converter.py b/SpiffWorkflow/bpmn/serializer/process_spec.py
similarity index 67%
rename from SpiffWorkflow/bpmn/serializer/workflow_spec_converter.py
rename to SpiffWorkflow/bpmn/serializer/process_spec.py
index 13aae1a9c..12bf29718 100644
--- a/SpiffWorkflow/bpmn/serializer/workflow_spec_converter.py
+++ b/SpiffWorkflow/bpmn/serializer/process_spec.py
@@ -1,18 +1,20 @@
-from .bpmn_converters import BpmnWorkflowSpecConverter
+from .helpers.spec import WorkflowSpecConverter, BpmnDataSpecificationConverter

 from ..specs.BpmnProcessSpec import BpmnProcessSpec
 from ..specs.MultiInstanceTask import MultiInstanceTask, getDynamicMIClass
 from ..specs.events.IntermediateEvent import _BoundaryEventParent
-
-from ...operators import Attrib, PathAttrib
-from ...specs.WorkflowSpec import WorkflowSpec
+from ..specs.BpmnProcessSpec import BpmnDataSpecification


-class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
+class BpmnDataObjectConverter(BpmnDataSpecificationConverter):
+    def __init__(self, registry, typename=None):
+        super().__init__(BpmnDataSpecification, registry, typename)

-    def __init__(self, task_spec_converters, data_converter=None):
-        super().__init__(BpmnProcessSpec, task_spec_converters, data_converter)
-        self.register(WorkflowSpec, self.base_workflow_spec_to_dict, self.from_dict)
+
+class BpmnProcessSpecConverter(WorkflowSpecConverter):
+
+    def __init__(self, registry):
+        super().__init__(BpmnProcessSpec, registry)

     def multi_instance_to_dict(self, spec):

@@ -22,18 +24,15 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
         # Bypass the automatic selection of a conversion function
         # This returns the partial function that was created on register for the original task type.
         # The second argument is the function that would be called by `convert`.
- conversion = self.convert_to_dict[classname] + conversion = self.registry.convert_to_dict[classname] func = conversion.args[1] # We can just call it directly and add the typename manually dct = func(spec) dct['typename'] = classname - # And we have to do this here, rather than in a converter - # We also have to manually apply the Attrib conversions - convert_attrib = lambda v: { 'name': v.name, 'typename': v.__class__.__name__ } dct.update({ - 'times': convert_attrib(spec.times) if spec.times is not None else None, + 'times': self.registry.convert(spec.times) if spec.times is not None else None, 'elementVar': spec.elementVar, - 'collection': convert_attrib(spec.collection) if spec.collection is not None else None, + 'collection': self.registry.convert(spec.collection) if spec.collection is not None else None, # These are not defined in the constructor, but added by the parser, or somewhere else inappropriate 'completioncondition': spec.completioncondition, 'prevtaskclass': spec.prevtaskclass, @@ -62,28 +61,26 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter): attrs.append('dmnEngine') # Terrible ugly hack - registered = dict((name, c) for c, name in self.typenames.items()) + registered = dict((name, c) for c, name in self.registry.typenames.items()) # First get the dynamic class cls = getDynamicMIClass(dct['name'], registered[dct['typename']]) # Restore the task according to the original task spec, so that its attributes can be converted # recursively - original = self.restore(dct.copy()) + original = self.registry.restore(dct.copy()) # But this task has the wrong class, so delete it from the spec del dct['wf_spec'].task_specs[original.name] # Create a new class using the dynamic class task_spec = cls(**dct) - # Restore the attributes that weren't recognized by the original converter - restore_attrib = lambda v: Attrib(v['name']) if v['typename'] == 'Attrib' else PathAttrib(v['name']) - task_spec.times = restore_attrib(dct['times']) if dct['times'] is not None else None - task_spec.collection = restore_attrib(dct['collection']) if dct['collection'] is not None else None + task_spec.times = self.registry.restore(dct['times']) if dct['times'] is not None else None + task_spec.collection = self.registry.restore(dct['collection']) if dct['collection'] is not None else None # Now copy everything else, from the temporary task spec if possible, otherwise the dict for attr in attrs: # If the original task has the attr, use the converted value if hasattr(original, attr): task_spec.__dict__[attr] = original.__dict__[attr] else: - task_spec.__dict__[attr] = self.restore(dct[attr]) + task_spec.__dict__[attr] = self.registry.restore(dct[attr]) # Handle adding any remaining attributes from the original task type that might not be # present in the restored version (for example attributes added since last serialized) @@ -110,16 +107,16 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter): 'description': spec.description, 'file': spec.file, 'task_specs': {}, - 'data_inputs': [ self.convert(obj) for obj in spec.data_inputs ], - 'data_outputs': [ self.convert(obj) for obj in spec.data_outputs ], - 'data_objects': dict([ (name, self.convert(obj)) for name, obj in spec.data_objects .items() ]), + 'data_inputs': [ self.registry.convert(obj) for obj in spec.data_inputs ], + 'data_outputs': [ self.registry.convert(obj) for obj in spec.data_outputs ], + 'data_objects': dict([ (name, self.registry.convert(obj)) for name, obj in spec.data_objects.items() ]), 'correlation_keys': 
spec.correlation_keys, } for name, task_spec in spec.task_specs.items(): if isinstance(task_spec, MultiInstanceTask): task_dict = self.multi_instance_to_dict(task_spec) else: - task_dict = self.convert(task_spec) + task_dict = self.registry.convert(task_spec) self.convert_task_spec_extensions(task_spec, task_dict) dct['task_specs'][name] = task_dict @@ -138,12 +135,12 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter): del spec.task_specs[f'{spec.name}.EndJoin'] # Add the data specs - spec.data_inputs = [ self.restore(obj_dct) for obj_dct in dct.pop('data_inputs', []) ] - spec.data_outputs = [ self.restore(obj_dct) for obj_dct in dct.pop('data_outputs', []) ] + spec.data_inputs = [ self.registry.restore(obj_dct) for obj_dct in dct.pop('data_inputs', []) ] + spec.data_outputs = [ self.registry.restore(obj_dct) for obj_dct in dct.pop('data_outputs', []) ] # fixme: This conditional can be removed in the next release, just avoiding invalid a potential # serialization issue for some users caught between official releases. if isinstance(dct.get('data_objects', {}), dict): - spec.data_objects = dict([ (name, self.restore(obj_dct)) for name, obj_dct in dct.pop('data_objects', {}).items() ]) + spec.data_objects = dict([ (name, self.registry.restore(obj_dct)) for name, obj_dct in dct.pop('data_objects', {}).items() ]) else: spec.data_objects = {} @@ -159,7 +156,7 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter): if 'prevtaskclass' in task_dict: task_spec = self.multiinstance_from_dict(task_dict) else: - task_spec = self.restore(task_dict) + task_spec = self.registry.restore(task_dict) if name == 'Start': spec.start = task_spec self.restore_task_spec_extensions(task_dict, task_spec) @@ -172,26 +169,3 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter): task_spec.outputs = [ spec.get_task_spec_from_name(name) for name in task_spec.outputs ] return spec - - def base_workflow_spec_to_dict(self, spec): - - # We should delete this method when we stop supporting the old serializer. - # It uses WorkflowSpec rather than BpmnWorkflowSpec, which does not support data objects. 
- # I hate copying this code here, but I am NOT putting an "if isinstance" check in the - # main method to handle a bug in the thing I'm replacing, - - dct = { - 'name': spec.name, - 'description': spec.description, - 'file': spec.file, - 'task_specs': {}, - } - for name, task_spec in spec.task_specs.items(): - if isinstance(task_spec, MultiInstanceTask): - task_dict = self.multi_instance_to_dict(task_spec) - else: - task_dict = self.convert(task_spec) - self.convert_task_spec_extensions(task_spec, task_dict) - dct['task_specs'][name] = task_dict - - return dct diff --git a/SpiffWorkflow/bpmn/serializer/task_spec.py b/SpiffWorkflow/bpmn/serializer/task_spec.py new file mode 100644 index 000000000..20382f9e9 --- /dev/null +++ b/SpiffWorkflow/bpmn/serializer/task_spec.py @@ -0,0 +1,292 @@ +from .helpers.spec import TaskSpecConverter + +from ...specs.StartTask import StartTask +from ...specs.Simple import Simple +from ...specs.LoopResetTask import LoopResetTask + +from ..specs.BpmnProcessSpec import _EndJoin +from ..specs.BpmnSpecMixin import _BpmnCondition +from ..specs.NoneTask import NoneTask +from ..specs.UserTask import UserTask +from ..specs.ManualTask import ManualTask +from ..specs.ScriptTask import ScriptTask +from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess +from ..specs.ExclusiveGateway import ExclusiveGateway +from ..specs.InclusiveGateway import InclusiveGateway +from ..specs.ParallelGateway import ParallelGateway +from ..specs.events.StartEvent import StartEvent +from ..specs.events.EndEvent import EndEvent +from ..specs.events.IntermediateEvent import ( + BoundaryEvent, + _BoundaryEventParent, + EventBasedGateway, + IntermediateCatchEvent, + IntermediateThrowEvent, + SendTask, + ReceiveTask, +) + +from ..workflow import BpmnWorkflow + + +class DefaultTaskSpecConverter(TaskSpecConverter): + + def to_dict(self, spec): + dct = self.get_default_attributes(spec) + return dct + + def from_dict(self, dct): + return self.task_spec_from_dict(dct) + + +class SimpleTaskConverter(DefaultTaskSpecConverter): + def __init__(self, registry): + super().__init__(Simple, registry) + + +class StartTaskConverter(DefaultTaskSpecConverter): + def __init__(self, registry): + super().__init__(StartTask, registry) + + +class LoopResetTaskConverter(DefaultTaskSpecConverter): + + def __init__(self, registry): + super().__init__(LoopResetTask, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['destination_id'] = str(spec.destination_id) + dct['destination_spec_name'] = spec.destination_spec_name + return dct + + def from_dict(self, dct): + spec = self.task_spec_from_dict(dct) + spec.destination_id = self.registry.convert(spec.destination_id) + return spec + + +class EndJoinConverter(DefaultTaskSpecConverter): + def __init__(self, registry): + super().__init__(_EndJoin, registry) + + +class BpmnTaskSpecConverter(TaskSpecConverter): + + def to_dict(self, spec): + dct = self.get_default_attributes(spec) + dct.update(self.get_bpmn_attributes(spec)) + return dct + + def from_dict(self, dct): + return self.task_spec_from_dict(dct) + + +class NoneTaskConverter(BpmnTaskSpecConverter): + def __init__(self, registry): + super().__init__(NoneTask, registry) + + +class UserTaskConverter(BpmnTaskSpecConverter): + def __init__(self, registry): + super().__init__(UserTask, registry) + + +class ManualTaskConverter(BpmnTaskSpecConverter): + def __init__(self, registry): + super().__init__(ManualTask, registry) + + +class ScriptTaskConverter(BpmnTaskSpecConverter): + + def 
__init__(self, registry): + super().__init__(ScriptTask, registry) + + def to_dict(self, spec): + dct = self.get_default_attributes(spec) + dct.update(self.get_bpmn_attributes(spec)) + dct['script'] = spec.script + return dct + + +class BoundaryEventParentConverter(BpmnTaskSpecConverter): + + def __init__(self, registry): + super().__init__(_BoundaryEventParent, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['main_child_task_spec'] = spec.main_child_task_spec.name + return dct + + +class SubprocessConverter(BpmnTaskSpecConverter): + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct.update(self.get_subworkflow_attributes(spec)) + return dct + + def from_dict(self, dct): + dct['subworkflow_spec'] = dct.pop('spec') + return self.task_spec_from_dict(dct) + + +class CallActivityTaskConverter(SubprocessConverter): + def __init__(self, registry): + super().__init__(CallActivity, registry) + self.wf_class = BpmnWorkflow + + +class TransactionSubprocessTaskConverter(SubprocessConverter): + def __init__(self, registry): + super().__init__(TransactionSubprocess, registry) + self.wf_class = BpmnWorkflow + + +class ConditionalGatewayConverter(BpmnTaskSpecConverter): + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['cond_task_specs'] = [ self.bpmn_condition_to_dict(cond) for cond in spec.cond_task_specs ] + dct['choice'] = spec.choice + return dct + + def from_dict(self, dct): + conditions = dct.pop('cond_task_specs') + spec = self.task_spec_from_dict(dct) + spec.cond_task_specs = [ self.bpmn_condition_from_dict(cond) for cond in conditions ] + return spec + + def bpmn_condition_from_dict(self, dct): + return (_BpmnCondition(dct['condition']) if dct['condition'] is not None else None, dct['task_spec']) + + def bpmn_condition_to_dict(self, condition): + expr, task_spec = condition + return { + 'condition': expr.args[0] if expr is not None else None, + 'task_spec': task_spec + } + + +class ExclusiveGatewayConverter(ConditionalGatewayConverter): + + def __init__(self, registry): + super().__init__(ExclusiveGateway, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['default_task_spec'] = spec.default_task_spec + return dct + + def from_dict(self, dct): + default_task_spec = dct.pop('default_task_spec') + spec = super().from_dict(dct) + spec.default_task_spec = default_task_spec + return spec + + +class InclusiveGatewayConverter(ConditionalGatewayConverter): + def __init__(self, registry): + super().__init__(InclusiveGateway, registry) + + +class ParallelGatewayConverter(BpmnTaskSpecConverter): + + def __init__(self, registry): + super().__init__(ParallelGateway, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct.update(self.get_join_attributes(spec)) + return dct + + def from_dict(self, dct): + return self.task_spec_from_dict(dct) + + +class EventConverter(BpmnTaskSpecConverter): + + def __init__(self, spec_class, registry): + super().__init__(spec_class, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['event_definition'] = self.registry.convert(spec.event_definition) + return dct + + def from_dict(self, dct): + dct['event_definition'] = self.registry.restore(dct['event_definition']) + return self.task_spec_from_dict(dct) + + +class StartEventConverter(EventConverter): + def __init__(self, registry): + super().__init__(StartEvent, registry) + + +class EndEventConverter(EventConverter): + def __init__(self, registry): + super().__init__(EndEvent, registry) + + +class 
IntermediateCatchEventConverter(EventConverter): + def __init__(self, registry): + super().__init__(IntermediateCatchEvent, registry) + + +class ReceiveTaskConverter(EventConverter): + def __init__(self, registry): + super().__init__(ReceiveTask, registry) + + +class IntermediateThrowEventConverter(EventConverter): + def __init__(self, registry): + super().__init__(IntermediateThrowEvent, registry) + + +class SendTaskConverter(EventConverter): + def __init__(self, registry): + super().__init__(SendTask, registry) + + +class BoundaryEventConverter(EventConverter): + + def __init__(self, registry): + super().__init__(BoundaryEvent, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['cancel_activity'] = spec.cancel_activity + return dct + + +class EventBasedGatewayConverter(EventConverter): + def __init__(self, registry): + super().__init__(EventBasedGateway, registry) + + +DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [ + SimpleTaskConverter, + StartTaskConverter, + EndJoinConverter, + LoopResetTaskConverter, + NoneTaskConverter, + UserTaskConverter, + ManualTaskConverter, + ScriptTaskConverter, + CallActivityTaskConverter, + TransactionSubprocessTaskConverter, + StartEventConverter, + EndEventConverter, + SendTaskConverter, + ReceiveTaskConverter, + IntermediateCatchEventConverter, + IntermediateThrowEventConverter, + EventBasedGatewayConverter, + BoundaryEventConverter, + BoundaryEventParentConverter, + ParallelGatewayConverter, + ExclusiveGatewayConverter, + InclusiveGatewayConverter, +] \ No newline at end of file diff --git a/SpiffWorkflow/bpmn/serializer/task_spec_converters.py b/SpiffWorkflow/bpmn/serializer/task_spec_converters.py deleted file mode 100644 index a6bfc829e..000000000 --- a/SpiffWorkflow/bpmn/serializer/task_spec_converters.py +++ /dev/null @@ -1,323 +0,0 @@ -from uuid import UUID - -from .bpmn_converters import BpmnTaskSpecConverter - -from ...specs.StartTask import StartTask -from ...specs.Simple import Simple -from ...specs.LoopResetTask import LoopResetTask - -from ..specs.BpmnProcessSpec import _EndJoin -from ..specs.BpmnSpecMixin import _BpmnCondition - -from ..specs.NoneTask import NoneTask -from ..specs.UserTask import UserTask -from ..specs.ManualTask import ManualTask -from ..specs.ScriptTask import ScriptTask -from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess - -from ..specs.ExclusiveGateway import ExclusiveGateway -from ..specs.InclusiveGateway import InclusiveGateway -from ..specs.ParallelGateway import ParallelGateway - -from ..specs.events.StartEvent import StartEvent -from ..specs.events.EndEvent import EndEvent -from ..specs.events.IntermediateEvent import BoundaryEvent, EventBasedGateway, IntermediateCatchEvent, IntermediateThrowEvent -from ..specs.events.IntermediateEvent import _BoundaryEventParent, SendTask, ReceiveTask - -from ..workflow import BpmnWorkflow - - -class SimpleTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(Simple, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class StartTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(StartTask, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class 
LoopResetTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(LoopResetTask, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - # Maybe I should add this to the base task converter, but I'm trying to keep it free of - # anything but task related conversions - dct['destination_id'] = str(spec.destination_id) - dct['destination_spec_name'] = spec.destination_spec_name - return dct - - def from_dict(self, dct): - spec = self.task_spec_from_dict(dct) - spec.destination_id = UUID(spec.destination_id) - return spec - - -class EndJoinConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(_EndJoin, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class NoneTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(NoneTask, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class UserTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(UserTask, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class ManualTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(ManualTask, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class ScriptTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(ScriptTask, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct['script'] = spec.script - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class CallActivityTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(CallActivity, data_converter, typename) - self.wf_class = BpmnWorkflow - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct.update(self.get_subworkflow_attributes(spec)) - return dct - - def from_dict(self, dct): - dct['subworkflow_spec'] = dct.pop('spec') - return self.task_spec_from_dict(dct) - - -class TransactionSubprocessTaskConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(TransactionSubprocess, data_converter, typename) - self.wf_class = BpmnWorkflow - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct.update(self.get_subworkflow_attributes(spec)) - return dct - - def from_dict(self, dct): - dct['subworkflow_spec'] = dct.pop('spec') - return self.task_spec_from_dict(dct) - - -class ConditionalGatewayConverter(BpmnTaskSpecConverter): - - def 
to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct['cond_task_specs'] = [ self.bpmn_condition_to_dict(cond) for cond in spec.cond_task_specs ] - dct['choice'] = spec.choice - return dct - - def from_dict(self, dct): - conditions = dct.pop('cond_task_specs') - spec = self.task_spec_from_dict(dct) - spec.cond_task_specs = [ self.bpmn_condition_from_dict(cond) for cond in conditions ] - return spec - - def bpmn_condition_from_dict(self, dct): - return (_BpmnCondition(dct['condition']) if dct['condition'] is not None else None, dct['task_spec']) - - def bpmn_condition_to_dict(self, condition): - - expr, task_spec = condition - return { - 'condition': expr.args[0] if expr is not None else None, - 'task_spec': task_spec - } - - -class ExclusiveGatewayConverter(ConditionalGatewayConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(ExclusiveGateway, data_converter, typename) - - def to_dict(self, spec): - dct = super().to_dict(spec) - dct['default_task_spec'] = spec.default_task_spec - return dct - - def from_dict(self, dct): - default_task_spec = dct.pop('default_task_spec') - spec = super().from_dict(dct) - spec.default_task_spec = default_task_spec - return spec - - -class InclusiveGatewayConverter(ConditionalGatewayConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(InclusiveGateway, data_converter, typename) - - -class ParallelGatewayConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(ParallelGateway, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct.update(self.get_join_attributes(spec)) - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class EventConverter(BpmnTaskSpecConverter): - - def __init__(self, spec_class, data_converter, typename): - super().__init__(spec_class, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct['event_definition'] = self.convert(spec.event_definition) - return dct - - def from_dict(self, dct): - dct['event_definition'] = self.restore(dct['event_definition']) - return self.task_spec_from_dict(dct) - - -class StartEventConverter(EventConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(StartEvent, data_converter, typename) - - -class EndEventConverter(EventConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(EndEvent, data_converter, typename) - - -class IntermediateCatchEventConverter(EventConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(IntermediateCatchEvent, data_converter, typename) - - -class ReceiveTaskConverter(EventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(ReceiveTask, data_converter, typename) - - -class IntermediateThrowEventConverter(EventConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(IntermediateThrowEvent, data_converter, typename) - - -class SendTaskConverter(EventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(SendTask, data_converter, typename) - - -class BoundaryEventConverter(EventConverter): - - def __init__(self, data_converter=None, typename=None): - 
super().__init__(BoundaryEvent, data_converter, typename) - - def to_dict(self, spec): - dct = super().to_dict(spec) - dct['cancel_activity'] = spec.cancel_activity - return dct - - -class BoundaryEventParentConverter(BpmnTaskSpecConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(_BoundaryEventParent, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct['main_child_task_spec'] = spec.main_child_task_spec.name - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class EventBasedGatewayConverter(EventConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(EventBasedGateway, data_converter, typename) diff --git a/SpiffWorkflow/bpmn/serializer/workflow.py b/SpiffWorkflow/bpmn/serializer/workflow.py index ffbe8f3cf..aeca13708 100644 --- a/SpiffWorkflow/bpmn/serializer/workflow.py +++ b/SpiffWorkflow/bpmn/serializer/workflow.py @@ -3,34 +3,25 @@ import gzip from copy import deepcopy from uuid import UUID -from .version_migration import MIGRATIONS - -from .bpmn_converters import BpmnDataConverter - from ..workflow import BpmnMessage, BpmnWorkflow from ..specs.SubWorkflowTask import SubWorkflowTask from ...task import Task -from .workflow_spec_converter import BpmnProcessSpecConverter +from .version_migration import MIGRATIONS +from .helpers.registry import DefaultRegistry +from .helpers.dictionary import DictionaryConverter -from .task_spec_converters import SimpleTaskConverter, StartTaskConverter, EndJoinConverter, LoopResetTaskConverter -from .task_spec_converters import NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter -from .task_spec_converters import CallActivityTaskConverter, TransactionSubprocessTaskConverter -from .task_spec_converters import StartEventConverter, EndEventConverter -from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter -from .task_spec_converters import SendTaskConverter, ReceiveTaskConverter -from .task_spec_converters import BoundaryEventConverter, BoundaryEventParentConverter -from .task_spec_converters import ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter +from .process_spec import BpmnProcessSpecConverter, BpmnDataObjectConverter +from .task_spec import DEFAULT_TASK_SPEC_CONVERTER_CLASSES +from .event_definition import DEFAULT_EVENT_CONVERTERS + +DEFAULT_SPEC_CONFIG = { + 'process': BpmnProcessSpecConverter, + 'data_specs': [BpmnDataObjectConverter], + 'task_specs': DEFAULT_TASK_SPEC_CONVERTER_CLASSES, + 'event_definitions': DEFAULT_EVENT_CONVERTERS, +} -DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [ - SimpleTaskConverter, StartTaskConverter, EndJoinConverter, LoopResetTaskConverter, - NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter, - CallActivityTaskConverter, TransactionSubprocessTaskConverter, - StartEventConverter, EndEventConverter, SendTaskConverter, ReceiveTaskConverter, - IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter, - BoundaryEventConverter, BoundaryEventParentConverter, - ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter -] class BpmnWorkflowSerializer: """ @@ -39,21 +30,18 @@ class BpmnWorkflowSerializer: The goal is to provide modular serialization capabilities. 
- You'll need to configure a Workflow Spec Converter with Task Spec Converters for any task types - present in your workflows. Because the Task Spec Converters also require initialization, the process - of building a Workflow Spec Converter is a little tedious; therefore, this class provides a static - method `configure_workflow_spec_converter` that can extend and/or override the default Task Spec - Converter list and return a Workflow Spec Converter that will recognize the overridden specs. + You'll need to configure a Workflow Spec Converter with converters for any task, data, or event types + present in your workflows. - If you have implemented any custom task specs, you'll need to write a converter to handle them and - provide it to this method; if you using only the defaults, you can call this with no arguments. + If you have implemented any custom specs, you'll need to write a converter to handle them and + replace the converter from the default configuration with your own. If your workflow contains non-JSON-serializable objects, you'll need to extend or replace the default data converter with one that will handle them. This converter needs to implement `convert` and `restore` methods. Serialization occurs in two phases: the first is to convert everything in the workflow to a - dictionary containins only JSON-serializable objects and the second is dumping to JSON. + dictionary containing only JSON-serializable objects and the second is dumping to JSON. This means that you can call the `workflow_to_dict` or `workflow_from_dict` methods separately from conversion to JSON for further manipulation of the state, or selective serialization of only certain @@ -70,36 +58,34 @@ class BpmnWorkflowSerializer: DEFAULT_JSON_DECODER_CLS = None @staticmethod - def configure_workflow_spec_converter(task_spec_overrides=None, data_converter=None, version=VERSION): + def configure_workflow_spec_converter(spec_config=None, registry=None): """ - This method can be used to add additional task spec converters to the default BPMN Process - converter. + This method can be used to create a spec converter that uses custom specs. - The task specs may contain arbitrary data, though none of the default task specs use it. We - may disallow that in the future, so we don't recommend using this capability. + The task specs may contain arbitrary data, though none of the default task specs use it. We don't + recommend that you do this, as we may disallow it in the future. However, if you have task spec data, + then you'll also need to make sure it can be serialized. - The task spec converters also take an optional typename argument; this will be included in the - serialized dictionaries so that the original class can restored. The unqualified classname is - used if none is provided. If a class in `task_spec_overrides` conflicts with one of the - defaults, the default will be removed and the provided one will be used instead. If you need - both for some reason, you'll have to instantiate the task spec converters and workflow spec - converter yourself. + The workflow spec serializer is based on the `DictionaryConverter` in the `helpers` package. You can + create one of your own, add custom data serialization to that and pass that in as the `registry`. The + conversion classes in the spec_config will be added to this "registry" and any classes with entries there + will be serialized/deserialized.
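A minimal usage sketch of the reworked configuration API described above. It uses only names introduced in this patch, plus the serializer's pre-existing serialize_json/deserialize_json round trip (not shown here); MyTaskConverter is a hypothetical custom converter class and `workflow` an existing BpmnWorkflow instance:

    from copy import deepcopy
    from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG

    # Start from a copy of the default configuration and register a
    # hypothetical custom converter for a custom task spec.
    config = deepcopy(DEFAULT_SPEC_CONFIG)
    config['task_specs'].append(MyTaskConverter)

    spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=config)
    serializer = BpmnWorkflowSerializer(spec_converter)

    json_str = serializer.serialize_json(workflow)    # workflow: an existing BpmnWorkflow
    restored = serializer.deserialize_json(json_str)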
- :param task_spec_overrides: a list of task spec converter classes - :param data_converter: an optional data converter for task spec data + See the documentation for `helpers.spec.BpmnSpecConverter` for more information about what's going + on here. + + :param spec_config: a dictionary specifying how to save and restore any classes used by the spec + :param registry: a `DictionaryConverter` with conversions for custom data (if applicable) """ - if task_spec_overrides is None: - task_spec_overrides = [] + config = spec_config or DEFAULT_SPEC_CONFIG + spec_converter = registry or DictionaryConverter() + config['process'](spec_converter) + for cls in config['data_specs'] + config['task_specs'] + config['event_definitions']: + cls(spec_converter) + return spec_converter - classnames = [c.__name__ for c in task_spec_overrides] - converters = [c(data_converter=data_converter) for c in task_spec_overrides] - for c in DEFAULT_TASK_SPEC_CONVERTER_CLASSES: - if c.__name__ not in classnames: - converters.append(c(data_converter=data_converter)) - return BpmnProcessSpecConverter(converters, version) - - - def __init__(self, spec_converter=None, data_converter=None, wf_class=None, version=VERSION, json_encoder_cls=DEFAULT_JSON_ENCODER_CLS, json_decoder_cls=DEFAULT_JSON_DECODER_CLS): + def __init__(self, spec_converter=None, data_converter=None, wf_class=None, version=VERSION, + json_encoder_cls=DEFAULT_JSON_ENCODER_CLS, json_decoder_cls=DEFAULT_JSON_DECODER_CLS): """Intializes a Workflow Serializer with the given Workflow, Task and Data Converters. :param spec_converter: the workflow spec converter @@ -110,7 +96,7 @@ class BpmnWorkflowSerializer: """ super().__init__() self.spec_converter = spec_converter if spec_converter is not None else self.configure_workflow_spec_converter() - self.data_converter = data_converter if data_converter is not None else BpmnDataConverter() + self.data_converter = data_converter if data_converter is not None else DefaultRegistry() self.wf_class = wf_class if wf_class is not None else BpmnWorkflow self.json_encoder_cls = json_encoder_cls self.json_decoder_cls = json_decoder_cls diff --git a/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py b/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py index d6b17c44d..8b1edb6c9 100644 --- a/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py +++ b/SpiffWorkflow/bpmn/specs/BpmnProcessSpec.py @@ -124,57 +124,3 @@ class BpmnProcessSpec(WorkflowSpec): self.data_outputs = [] self.data_objects = {} self.correlation_keys = {} - - def get_all_lanes(self): - """ - Returns a set of the distinct lane names used in the process (including - called activities) - """ - - done = set() - lanes = set() - - def recursive_find(task_spec): - if task_spec in done: - return - - done.add(task_spec) - - if hasattr(task_spec, 'lane') and task_spec.lane: - lanes.add(task_spec.lane) - - if hasattr(task_spec, 'spec'): - recursive_find(task_spec.spec.start) - - for t in task_spec.outputs: - recursive_find(t) - - recursive_find(self.start) - - return lanes - - def get_specs_depth_first(self): - """ - Get the specs for all processes (including called ones), in depth first - order. 
- """ - - done = set() - specs = [self] - - def recursive_find(task_spec): - if task_spec in done: - return - - done.add(task_spec) - - if hasattr(task_spec, 'spec'): - specs.append(task_spec.spec) - recursive_find(task_spec.spec.start) - - for t in task_spec.outputs: - recursive_find(t) - - recursive_find(self.start) - - return specs diff --git a/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py b/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py index cbfbf502e..173ab2a8d 100644 --- a/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py +++ b/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py @@ -4,7 +4,6 @@ from copy import deepcopy from SpiffWorkflow.task import TaskState from .BpmnSpecMixin import BpmnSpecMixin from ..exceptions import WorkflowDataException -from ...specs.base import TaskSpec class SubWorkflowTask(BpmnSpecMixin): @@ -26,9 +25,6 @@ class SubWorkflowTask(BpmnSpecMixin): def spec_type(self): return 'Subprocess' - def test(self): - TaskSpec.test(self) - def _on_ready_before_hook(self, my_task): subworkflow = my_task.workflow.create_subprocess(my_task, self.spec, self.name) subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task) @@ -41,10 +37,6 @@ class SubWorkflowTask(BpmnSpecMixin): def _on_subworkflow_completed(self, subworkflow, my_task): - # Shouldn't this always be true? - if isinstance(my_task.parent.task_spec, BpmnSpecMixin): - my_task.parent.task_spec._child_complete_hook(my_task) - if len(subworkflow.spec.data_outputs) == 0: # Copy all workflow data if no outputs are specified my_task.data = deepcopy(subworkflow.last_task.data) @@ -63,14 +55,7 @@ class SubWorkflowTask(BpmnSpecMixin): def _update_hook(self, my_task): wf = my_task.workflow._get_outermost_workflow(my_task) if my_task.id not in wf.subprocesses: - super()._update_hook(my_task) - - def _predict_hook(self, my_task): - # The base Subworkflow task predict doesn't work with the loop reset task - BpmnSpecMixin._predict_hook(self, my_task) - - def _on_complete_hook(self, my_task): - BpmnSpecMixin._on_complete_hook(self, my_task) + return super()._update_hook(my_task) def _on_cancel(self, my_task): subworkflow = my_task.workflow.get_subprocess(my_task) diff --git a/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py b/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py index 09e980cf5..a80349ad2 100644 --- a/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py +++ b/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py @@ -121,13 +121,10 @@ class BoundaryEvent(CatchingEvent): def catch(self, my_task, event_definition): super(BoundaryEvent, self).catch(my_task, event_definition) + # Would love to get rid of this statement and manage in the workflow + # However, it is not really compatible with how boundary events work. my_task.complete() - def _on_complete_hook(self, my_task): - super(BoundaryEvent, self)._on_complete_hook(my_task) - # Notify the boundary event parent as well. - my_task.parent.task_spec._child_complete_hook(my_task) - class EventBasedGateway(CatchingEvent): diff --git a/SpiffWorkflow/bpmn/specs/events/event_types.py b/SpiffWorkflow/bpmn/specs/events/event_types.py index 348ee4343..279bc2c20 100644 --- a/SpiffWorkflow/bpmn/specs/events/event_types.py +++ b/SpiffWorkflow/bpmn/specs/events/event_types.py @@ -46,24 +46,18 @@ class CatchingEvent(Simple, BpmnSpecMixin): definition, at which point we can update our task's state. 
""" self.event_definition.catch(my_task, event_definition) - self._update_hook(my_task) + my_task._set_state(TaskState.WAITING) def _update_hook(self, my_task): - if my_task.state == TaskState.WAITING and self.event_definition.has_fired(my_task): - my_task._ready() - super(CatchingEvent, self)._update_hook(my_task) - - def _on_ready_hook(self, my_task): - # None events don't propogate, so as soon as we're ready, we fire our event if isinstance(self.event_definition, NoneEventDefinition): my_task._set_internal_data(event_fired=True) - # If we have not seen the event we're waiting for, enter the waiting state - if not self.event_definition.has_fired(my_task): + if self.event_definition.has_fired(my_task): + return True + else: my_task._set_state(TaskState.WAITING) - super(CatchingEvent, self)._on_ready_hook(my_task) def _on_complete_hook(self, my_task): diff --git a/SpiffWorkflow/bpmn/workflow.py b/SpiffWorkflow/bpmn/workflow.py index 52ebf7e0f..6a564d99f 100644 --- a/SpiffWorkflow/bpmn/workflow.py +++ b/SpiffWorkflow/bpmn/workflow.py @@ -145,6 +145,9 @@ class BpmnWorkflow(Workflow): for task in tasks: task.task_spec.catch(task, event_definition) + # Move any tasks that received message to READY + self.refresh_waiting_tasks() + # Figure out if we need to create an extenal message if len(tasks) == 0 and isinstance(event_definition, MessageEventDefinition): self.bpmn_messages.append( diff --git a/SpiffWorkflow/camunda/serializer/config.py b/SpiffWorkflow/camunda/serializer/config.py new file mode 100644 index 000000000..d38007ae4 --- /dev/null +++ b/SpiffWorkflow/camunda/serializer/config.py @@ -0,0 +1,15 @@ +from copy import deepcopy + +from SpiffWorkflow.bpmn.serializer.workflow import DEFAULT_SPEC_CONFIG +from SpiffWorkflow.bpmn.serializer.task_spec import UserTaskConverter as DefaultUserTaskConverter +from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventConverter + +from .task_spec import UserTaskConverter +from .event_definition import MessageEventDefinitionConverter + + +CAMUNDA_SPEC_CONFIG = deepcopy(DEFAULT_SPEC_CONFIG) +CAMUNDA_SPEC_CONFIG['task_specs'].remove(DefaultUserTaskConverter) +CAMUNDA_SPEC_CONFIG['task_specs'].append(UserTaskConverter) +CAMUNDA_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventConverter) +CAMUNDA_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter) diff --git a/SpiffWorkflow/camunda/serializer/event_definition.py b/SpiffWorkflow/camunda/serializer/event_definition.py new file mode 100644 index 000000000..8efb16a03 --- /dev/null +++ b/SpiffWorkflow/camunda/serializer/event_definition.py @@ -0,0 +1,20 @@ +from SpiffWorkflow.bpmn.serializer.helpers.spec import EventDefinitionConverter +from ..specs.events.event_definitions import MessageEventDefinition + + +class MessageEventDefinitionConverter(EventDefinitionConverter): + + def __init__(self, registry): + super().__init__(MessageEventDefinition, registry) + + def to_dict(self, event_definition): + dct = super().to_dict(event_definition) + dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties) + dct['payload'] = event_definition.payload + dct['result_var'] = event_definition.result_var + return dct + + def from_dict(self, dct): + dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties']) + event_definition = super().from_dict(dct) + return event_definition diff --git a/SpiffWorkflow/camunda/serializer/task_spec.py 
b/SpiffWorkflow/camunda/serializer/task_spec.py new file mode 100644 index 000000000..20ef12460 --- /dev/null +++ b/SpiffWorkflow/camunda/serializer/task_spec.py @@ -0,0 +1,34 @@ +from ...bpmn.serializer.helpers.spec import TaskSpecConverter + +from ..specs.UserTask import UserTask, Form + +class UserTaskConverter(TaskSpecConverter): + + def __init__(self, registry): + super().__init__(UserTask, registry) + + def to_dict(self, spec): + dct = self.get_default_attributes(spec) + dct.update(self.get_bpmn_attributes(spec)) + dct['form'] = self.form_to_dict(spec.form) + return dct + + def from_dict(self, dct): + dct['form'] = Form(init=dct['form']) + return self.task_spec_from_dict(dct) + + def form_to_dict(self, form): + dct = {'key': form.key, 'fields': []} + for field in form.fields: + new = { + 'id': field.id, + 'default_value': field.default_value, + 'label': field.label, + 'type': field.type, + 'properties': [ prop.__dict__ for prop in field.properties ], + 'validation': [ val.__dict__ for val in field.validation ], + } + if field.type == "enum": + new['options'] = [ opt.__dict__ for opt in field.options ] + dct['fields'].append(new) + return dct diff --git a/SpiffWorkflow/camunda/serializer/task_spec_converters.py b/SpiffWorkflow/camunda/serializer/task_spec_converters.py deleted file mode 100644 index 3f997a0e3..000000000 --- a/SpiffWorkflow/camunda/serializer/task_spec_converters.py +++ /dev/null @@ -1,90 +0,0 @@ -from functools import partial - -from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent -from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent -from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent -from ..specs.events.event_definitions import MessageEventDefinition -from ...bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter - -from ..specs.UserTask import UserTask, Form - -class CamundaEventConverter(BpmnTaskSpecConverter): - - def __init__(self, spec_class, data_converter, typename): - super().__init__(spec_class, data_converter, typename) - self.register( - MessageEventDefinition, - self.event_definition_to_dict, - partial(self.event_defintion_from_dict, MessageEventDefinition) - ) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - if isinstance(spec, BoundaryEvent): - dct['cancel_activity'] = spec.cancel_activity - dct['event_definition'] = self.convert(spec.event_definition) - return dct - - def from_dict(self, dct): - dct['event_definition'] = self.restore(dct['event_definition']) - return self.task_spec_from_dict(dct) - - def event_definition_to_dict(self, event_definition): - dct = super().event_definition_to_dict(event_definition) - if isinstance(event_definition, MessageEventDefinition): - dct['payload'] = event_definition.payload - dct['result_var'] = event_definition.result_var - return dct - - -class StartEventConverter(CamundaEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(StartEvent, data_converter, typename) - -class EndEventConverter(CamundaEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(EndEvent, data_converter, typename) - -class BoundaryEventConverter(CamundaEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(BoundaryEvent, data_converter, typename) - -class IntermediateCatchEventConverter(CamundaEventConverter): - def __init__(self, data_converter=None, 
typename=None): - super().__init__(IntermediateCatchEvent, data_converter, typename) - -class IntermediateThrowEventConverter(CamundaEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(IntermediateThrowEvent, data_converter, typename) - -class UserTaskConverter(CamundaEventConverter): - - def __init__(self, data_converter=None, typename=None): - super().__init__(UserTask, data_converter, typename) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct['form'] = self.form_to_dict(spec.form) - return dct - - def from_dict(self, dct): - dct['form'] = Form(init=dct['form']) - return self.task_spec_from_dict(dct) - - def form_to_dict(self, form): - dct = {'key': form.key, 'fields': []} - for field in form.fields: - new = { - 'id': field.id, - 'default_value': field.default_value, - 'label': field.label, - 'type': field.type, - 'properties': [ prop.__dict__ for prop in field.properties ], - 'validation': [ val.__dict__ for val in field.validation ], - } - if field.type == "enum": - new['options'] = [ opt.__dict__ for opt in field.options ] - dct['fields'].append(new) - return dct diff --git a/SpiffWorkflow/dmn/parser/BpmnDmnParser.py b/SpiffWorkflow/dmn/parser/BpmnDmnParser.py index c1b9799f8..349186c84 100644 --- a/SpiffWorkflow/dmn/parser/BpmnDmnParser.py +++ b/SpiffWorkflow/dmn/parser/BpmnDmnParser.py @@ -74,11 +74,8 @@ class BpmnDmnParser(BpmnParser): Add all filenames in the given list to the parser's set. """ for filename in filenames: - f = open(filename, 'r') - try: + with open(filename, 'r') as f: self.add_dmn_xml(etree.parse(f).getroot(), filename=filename) - finally: - f.close() def get_dependencies(self): return self.process_dependencies.union(self.dmn_dependencies) diff --git a/SpiffWorkflow/dmn/serializer/task_spec_converters.py b/SpiffWorkflow/dmn/serializer/task_spec.py similarity index 93% rename from SpiffWorkflow/dmn/serializer/task_spec_converters.py rename to SpiffWorkflow/dmn/serializer/task_spec.py index 9e78c418f..99519351b 100644 --- a/SpiffWorkflow/dmn/serializer/task_spec_converters.py +++ b/SpiffWorkflow/dmn/serializer/task_spec.py @@ -1,14 +1,14 @@ -from ...bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter +from ...bpmn.serializer.helpers.spec import TaskSpecConverter from ..specs.BusinessRuleTask import BusinessRuleTask from ..specs.model import DecisionTable, Rule, HitPolicy from ..specs.model import Input, InputEntry, Output, OutputEntry from ..engine.DMNEngine import DMNEngine -class BusinessRuleTaskConverter(BpmnTaskSpecConverter): +class BusinessRuleTaskConverter(TaskSpecConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(BusinessRuleTask, data_converter, typename) + def __init__(self, registry): + super().__init__(BusinessRuleTask, registry) def to_dict(self, spec): dct = self.get_default_attributes(spec) diff --git a/SpiffWorkflow/specs/AcquireMutex.py b/SpiffWorkflow/specs/AcquireMutex.py index e2b931522..59c712037 100644 --- a/SpiffWorkflow/specs/AcquireMutex.py +++ b/SpiffWorkflow/specs/AcquireMutex.py @@ -51,9 +51,9 @@ class AcquireMutex(TaskSpec): mutex = my_task.workflow._get_mutex(self.mutex) if mutex.testandset(): self.entered_event.emit(my_task.workflow, my_task) - my_task._ready() - return - my_task._set_state(TaskState.WAITING) + return True + else: + my_task._set_state(TaskState.WAITING) def serialize(self, serializer): return serializer.serialize_acquire_mutex(self) diff --git 
a/SpiffWorkflow/specs/Celery.py b/SpiffWorkflow/specs/Celery.py index 85c4d1340..b51d98e9e 100644 --- a/SpiffWorkflow/specs/Celery.py +++ b/SpiffWorkflow/specs/Celery.py @@ -248,8 +248,8 @@ class Celery(TaskSpec): if not self._start(my_task): if not my_task._has_state(TaskState.WAITING): my_task._set_state(TaskState.WAITING) - return - super(Celery, self)._update_hook(my_task) + else: + return True def serialize(self, serializer): return serializer.serialize_celery(self) diff --git a/SpiffWorkflow/specs/Execute.py b/SpiffWorkflow/specs/Execute.py index a05ea65a0..2988105eb 100644 --- a/SpiffWorkflow/specs/Execute.py +++ b/SpiffWorkflow/specs/Execute.py @@ -73,8 +73,8 @@ class Execute(TaskSpec): def _update_hook(self, my_task): if not self._start(my_task): my_task._set_state(TaskState.WAITING) - return - super(Execute, self)._update_hook(my_task) + else: + return super(Execute, self)._update_hook(my_task) def serialize(self, serializer): return serializer.serialize_execute(self) diff --git a/SpiffWorkflow/specs/Gate.py b/SpiffWorkflow/specs/Gate.py index 4605e05d2..006e796e7 100644 --- a/SpiffWorkflow/specs/Gate.py +++ b/SpiffWorkflow/specs/Gate.py @@ -60,7 +60,7 @@ class Gate(TaskSpec): if not task._has_state(TaskState.COMPLETED): my_task._set_state(TaskState.WAITING) return - super(Gate, self)._update_hook(my_task) + return True def serialize(self, serializer): return serializer.serialize_gate(self) diff --git a/SpiffWorkflow/specs/Join.py b/SpiffWorkflow/specs/Join.py index 0a16a64c5..f1ba4a622 100644 --- a/SpiffWorkflow/specs/Join.py +++ b/SpiffWorkflow/specs/Join.py @@ -218,24 +218,16 @@ class Join(TaskSpec): def _update_hook(self, my_task): # Check whether enough incoming branches have completed. may_fire, waiting_tasks = self._start(my_task) - if not may_fire: + if may_fire: + # If this is a cancelling join, cancel all incoming branches except for the one that just completed. + if self.cancel_remaining: + for task in waiting_tasks: + task.cancel() + + # Update the state of our child objects. + self._do_join(my_task) + else: my_task._set_state(TaskState.WAITING) - return - - # If this is a cancelling join, cancel all incoming branches, - # except for the one that just completed. - if self.cancel_remaining: - for task in waiting_tasks: - task.cancel() - - # We do NOT set the task state to COMPLETED, because in - # case all other incoming tasks get cancelled (or never reach - # the Join for other reasons, such as reaching a stub branch), - # we need to revisit it. - my_task._ready() - - # Update the state of our child objects. 
- self._do_join(my_task) def _do_join(self, my_task): diff --git a/SpiffWorkflow/specs/SubWorkflow.py b/SpiffWorkflow/specs/SubWorkflow.py index 3e18acaed..5971f2fe2 100644 --- a/SpiffWorkflow/specs/SubWorkflow.py +++ b/SpiffWorkflow/specs/SubWorkflow.py @@ -127,7 +127,7 @@ class SubWorkflow(TaskSpec): subworkflow = my_task._get_internal_data('subworkflow') if subworkflow is None: # On the first update, we have to create the subworkflow - super()._update_hook(my_task) + return True elif subworkflow.is_completed(): # Then wait until it finishes to complete my_task.complete() diff --git a/SpiffWorkflow/specs/Transform.py b/SpiffWorkflow/specs/Transform.py index 9f35df95a..1f0ba5186 100644 --- a/SpiffWorkflow/specs/Transform.py +++ b/SpiffWorkflow/specs/Transform.py @@ -55,7 +55,7 @@ class Transform(TaskSpec): for transform in self.transforms: logger.debug(f'Execute transform', extra=my_task.log_info({'transform': transform})) exec(transform) - super(Transform, self)._update_hook(my_task) + return True def serialize(self, serializer): s_state = serializer.serialize_simple(self) diff --git a/SpiffWorkflow/specs/base.py b/SpiffWorkflow/specs/base.py index 6a6b10c39..6daf93b44 100644 --- a/SpiffWorkflow/specs/base.py +++ b/SpiffWorkflow/specs/base.py @@ -273,27 +273,18 @@ class TaskSpec(object): completes it makes sure to call this method so we can react. """ my_task._inherit_data() - # We were doing this in _update_hook, but to me that seems inconsistent with the spirit - # of the hook functions. Moving it here allows removal of some repeated calls (overridden - # hook methods still need to do these things) if my_task._is_predicted(): self._predict(my_task) self.entered_event.emit(my_task.workflow, my_task) - self._update_hook(my_task) + if self._update_hook(my_task): + my_task._ready() def _update_hook(self, my_task): """ - Typically this method should perform the following actions:: - - Update the state of the corresponding task. - - Update the predictions for its successors. - - Returning non-False will cause the task to go into READY. - Returning any other value will cause no action. + This method should decide whether the task should run now or needs to wait. + Returning True will cause the task to go into READY. """ - # If this actually did what the documentation said (returned a value indicating - that the task was ready), then a lot of things might be easier.
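The reworked hook contract is easiest to see in a custom spec. A minimal sketch, following the pattern the Gate, Execute, and Join changes in this patch use; WaitForFlag is hypothetical and not part of the patch:

    from SpiffWorkflow.specs.base import TaskSpec
    from SpiffWorkflow.task import TaskState

    class WaitForFlag(TaskSpec):
        """Hypothetical spec illustrating the new _update_hook contract."""
        def _update_hook(self, my_task):
            # Returning True lets TaskSpec._update() move the task to READY;
            # otherwise the spec parks the task in WAITING itself.
            if my_task.workflow.data.get('flag'):
                return True
            my_task._set_state(TaskState.WAITING)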
- my_task._ready() + return True def _on_ready(self, my_task): """ @@ -390,7 +381,9 @@ class TaskSpec(object): my_task.workflow.last_task = my_task self._on_complete_hook(my_task) for child in my_task.children: - child.task_spec._update(child) + # Don't like this, but this is the most expedient way of preventing cancelled tasks from reactivation + if child.state != TaskState.CANCELLED: + child.task_spec._update(child) my_task.workflow._task_completed_notify(my_task) self.completed_event.emit(my_task.workflow, my_task) diff --git a/SpiffWorkflow/spiff/event_definition.py b/SpiffWorkflow/spiff/event_definition.py new file mode 100644 index 000000000..e69de29bb diff --git a/SpiffWorkflow/spiff/serializer/config.py b/SpiffWorkflow/spiff/serializer/config.py new file mode 100644 index 000000000..61b5cbd90 --- /dev/null +++ b/SpiffWorkflow/spiff/serializer/config.py @@ -0,0 +1,65 @@ +from copy import deepcopy + +from SpiffWorkflow.bpmn.serializer.workflow import DEFAULT_SPEC_CONFIG +from SpiffWorkflow.bpmn.serializer.task_spec import ( + SimpleTaskConverter, + StartTaskConverter, + EndJoinConverter, + LoopResetTaskConverter, + StartEventConverter, + EndEventConverter, + IntermediateCatchEventConverter, + IntermediateThrowEventConverter, + EventBasedGatewayConverter, + BoundaryEventConverter, + BoundaryEventParentConverter, + ParallelGatewayConverter, + ExclusiveGatewayConverter, + InclusiveGatewayConverter, +) + +from .task_spec import ( + NoneTaskConverter, + ManualTaskConverter, + UserTaskConverter, + SendTaskConverter, + ReceiveTaskConverter, + ScriptTaskConverter, + ServiceTaskConverter, + SubWorkflowTaskConverter, + TransactionSubprocessConverter, + CallActivityTaskConverter, +) + +from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventDefinitionConverter +from .event_definition import MessageEventDefinitionConverter + +SPIFF_SPEC_CONFIG = deepcopy(DEFAULT_SPEC_CONFIG) +SPIFF_SPEC_CONFIG['task_specs'] = [ + SimpleTaskConverter, + StartTaskConverter, + EndJoinConverter, + LoopResetTaskConverter, + StartEventConverter, + EndEventConverter, + IntermediateCatchEventConverter, + IntermediateThrowEventConverter, + EventBasedGatewayConverter, + BoundaryEventConverter, + BoundaryEventParentConverter, + ParallelGatewayConverter, + ExclusiveGatewayConverter, + InclusiveGatewayConverter, + NoneTaskConverter, + ManualTaskConverter, + UserTaskConverter, + SendTaskConverter, + ReceiveTaskConverter, + ScriptTaskConverter, + ServiceTaskConverter, + SubWorkflowTaskConverter, + TransactionSubprocessConverter, + CallActivityTaskConverter, +] +SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventDefinitionConverter) +SPIFF_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter) \ No newline at end of file diff --git a/SpiffWorkflow/spiff/serializer/event_definition.py b/SpiffWorkflow/spiff/serializer/event_definition.py new file mode 100644 index 000000000..7c029b493 --- /dev/null +++ b/SpiffWorkflow/spiff/serializer/event_definition.py @@ -0,0 +1,20 @@ +from SpiffWorkflow.bpmn.serializer.helpers.spec import EventDefinitionConverter + +from SpiffWorkflow.spiff.specs.events.event_definitions import MessageEventDefinition + +class MessageEventDefinitionConverter(EventDefinitionConverter): + + def __init__(self, registry): + super().__init__(MessageEventDefinition, registry) + + def to_dict(self, event_definition): + dct = super().to_dict(event_definition) + dct['correlation_properties'] = 
self.correlation_properties_to_dict(event_definition.correlation_properties) + dct['expression'] = event_definition.expression + dct['message_var'] = event_definition.message_var + return dct + + def from_dict(self, dct): + dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties']) + event_definition = super().from_dict(dct) + return event_definition \ No newline at end of file diff --git a/SpiffWorkflow/spiff/serializer/task_spec.py b/SpiffWorkflow/spiff/serializer/task_spec.py new file mode 100644 index 000000000..367a230a8 --- /dev/null +++ b/SpiffWorkflow/spiff/serializer/task_spec.py @@ -0,0 +1,115 @@ +from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter + +from SpiffWorkflow.spiff.specs.none_task import NoneTask +from SpiffWorkflow.spiff.specs.manual_task import ManualTask +from SpiffWorkflow.spiff.specs.user_task import UserTask +from SpiffWorkflow.spiff.specs.script_task import ScriptTask +from SpiffWorkflow.spiff.specs.service_task import ServiceTask +from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity +from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask + + +class SpiffBpmnTaskConverter(TaskSpecConverter): + + def to_dict(self, spec): + dct = self.get_default_attributes(spec) + dct.update(self.get_bpmn_attributes(spec)) + dct['prescript'] = spec.prescript + dct['postscript'] = spec.postscript + return dct + + def from_dict(self, dct): + return self.task_spec_from_dict(dct) + + +class NoneTaskConverter(SpiffBpmnTaskConverter): + def __init__(self, registry): + super().__init__(NoneTask, registry) + + +class ManualTaskConverter(SpiffBpmnTaskConverter): + def __init__(self, registry): + super().__init__(ManualTask, registry) + + +class UserTaskConverter(SpiffBpmnTaskConverter): + def __init__(self, registry): + super().__init__(UserTask, registry) + + +class SendTaskConverter(SpiffBpmnTaskConverter): + + def __init__(self, registry, typename=None): + super().__init__(SendTask, registry, typename) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['event_definition'] = self.registry.convert(spec.event_definition) + return dct + + def from_dict(self, dct): + dct['event_definition'] = self.registry.restore(dct['event_definition']) + return super().from_dict(dct) + + +class ReceiveTaskConverter(SpiffBpmnTaskConverter): + def __init__(self, registry, typename=None): + super().__init__(ReceiveTask, registry, typename) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['event_definition'] = self.registry.convert(spec.event_definition) + return dct + + def from_dict(self, dct): + dct['event_definition'] = self.registry.restore(dct['event_definition']) + return super().from_dict(dct) + + +class ScriptTaskConverter(SpiffBpmnTaskConverter): + def __init__(self, registry): + super().__init__(ScriptTask, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['script'] = spec.script + return dct + + +class ServiceTaskConverter(SpiffBpmnTaskConverter): + def __init__(self, registry): + super().__init__(ServiceTask, registry) + + def to_dict(self, spec): + dct = super().to_dict(spec) + dct['operation_name'] = spec.operation_name + dct['operation_params'] = spec.operation_params + dct['result_variable'] = spec.result_variable + return dct + + def from_dict(self, dct): + return self.task_spec_from_dict(dct) + + +class SubprocessTaskConverter(SpiffBpmnTaskConverter): + + def to_dict(self, spec): + dct = 
super().to_dict(spec) + dct.update(self.get_subworkflow_attributes(spec)) + return dct + + def from_dict(self, dct): + dct['subworkflow_spec'] = dct.pop('spec') + return super().task_spec_from_dict(dct) + +class SubWorkflowTaskConverter(SubprocessTaskConverter): + def __init__(self, registry): + super().__init__(SubWorkflowTask, registry) + +class TransactionSubprocessConverter(SubprocessTaskConverter): + def __init__(self, registry): + super().__init__(TransactionSubprocess, registry) + +class CallActivityTaskConverter(SubprocessTaskConverter): + def __init__(self, registry): + super().__init__(CallActivity, registry) diff --git a/SpiffWorkflow/spiff/serializer/task_spec_converters.py b/SpiffWorkflow/spiff/serializer/task_spec_converters.py deleted file mode 100644 index abf3614b2..000000000 --- a/SpiffWorkflow/spiff/serializer/task_spec_converters.py +++ /dev/null @@ -1,170 +0,0 @@ -from functools import partial - -from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter -from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent -from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent -from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent, EventBasedGateway -from SpiffWorkflow.spiff.specs.none_task import NoneTask -from SpiffWorkflow.spiff.specs.manual_task import ManualTask -from SpiffWorkflow.spiff.specs.user_task import UserTask -from SpiffWorkflow.spiff.specs.script_task import ScriptTask -from SpiffWorkflow.spiff.specs.service_task import ServiceTask -from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity -from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask -from SpiffWorkflow.spiff.specs.events.event_definitions import MessageEventDefinition - - -class SpiffBpmnTaskConverter(BpmnTaskSpecConverter): - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - dct['prescript'] = spec.prescript - dct['postscript'] = spec.postscript - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class NoneTaskConverter(SpiffBpmnTaskConverter): - def __init__(self, data_converter=None): - super().__init__(NoneTask, data_converter) - - -class ManualTaskConverter(SpiffBpmnTaskConverter): - def __init__(self, data_converter=None): - super().__init__(ManualTask, data_converter) - - -class UserTaskConverter(SpiffBpmnTaskConverter): - def __init__(self, data_converter=None): - super().__init__(UserTask, data_converter) - - -class ScriptTaskConverter(SpiffBpmnTaskConverter): - def __init__(self, data_converter=None): - super().__init__(ScriptTask, data_converter) - - def to_dict(self, spec): - dct = super().to_dict(spec) - dct['script'] = spec.script - return dct - - -class ServiceTaskConverter(SpiffBpmnTaskConverter): - def __init__(self, data_converter=None): - super().__init__(ServiceTask, data_converter) - - def to_dict(self, spec): - dct = super().to_dict(spec) - dct['operation_name'] = spec.operation_name - dct['operation_params'] = spec.operation_params - dct['result_variable'] = spec.result_variable - return dct - - def from_dict(self, dct): - return self.task_spec_from_dict(dct) - - -class SubprocessTaskConverter(SpiffBpmnTaskConverter): - - def to_dict(self, spec): - dct = super().to_dict(spec) - dct.update(self.get_subworkflow_attributes(spec)) - return dct - - def from_dict(self, dct): - dct['subworkflow_spec'] = 
dct.pop('spec') - return super().task_spec_from_dict(dct) - -class SubWorkflowTaskConverter(SubprocessTaskConverter): - - def __init__(self, data_converter=None): - super().__init__(SubWorkflowTask, data_converter) - - -class TransactionSubprocessConverter(SubprocessTaskConverter): - - def __init__(self, data_converter=None): - super().__init__(TransactionSubprocess, data_converter) - - -class CallActivityTaskConverter(SubprocessTaskConverter): - - def __init__(self, data_converter=None): - super().__init__(CallActivity, data_converter) - -class SpiffEventConverter(BpmnTaskSpecConverter): - - def __init__(self, spec_class, data_converter, typename): - super().__init__(spec_class, data_converter, typename) - self.register( - MessageEventDefinition, - self.event_definition_to_dict, - partial(self.event_defintion_from_dict, MessageEventDefinition) - ) - - def to_dict(self, spec): - dct = self.get_default_attributes(spec) - dct.update(self.get_bpmn_attributes(spec)) - if isinstance(spec, BoundaryEvent): - dct['cancel_activity'] = spec.cancel_activity - dct['event_definition'] = self.convert(spec.event_definition) - return dct - - def from_dict(self, dct): - dct['event_definition'] = self.restore(dct['event_definition']) - return self.task_spec_from_dict(dct) - - def event_definition_to_dict(self, event_definition): - dct = super().event_definition_to_dict(event_definition) - if isinstance(event_definition, MessageEventDefinition): - dct['expression'] = event_definition.expression - dct['message_var'] = event_definition.message_var - return dct - - -class StartEventConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(StartEvent, data_converter, typename) - -class EndEventConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(EndEvent, data_converter, typename) - -class BoundaryEventConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(BoundaryEvent, data_converter, typename) - -class IntermediateCatchEventConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(IntermediateCatchEvent, data_converter, typename) - -class IntermediateThrowEventConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(IntermediateThrowEvent, data_converter, typename) - -class SendTaskConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(SendTask, data_converter, typename) - - def to_dict(self, spec): - dct = super().to_dict(spec) - dct['prescript'] = spec.prescript - dct['postscript'] = spec.postscript - return dct - -class ReceiveTaskConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(ReceiveTask, data_converter, typename) - - def to_dict(self, spec): - dct = super().to_dict(spec) - dct['prescript'] = spec.prescript - dct['postscript'] = spec.postscript - return dct - -class EventBasedGatewayConverter(SpiffEventConverter): - def __init__(self, data_converter=None, typename=None): - super().__init__(EventBasedGateway, data_converter, typename) \ No newline at end of file diff --git a/SpiffWorkflow/task.py b/SpiffWorkflow/task.py index 6d4a918aa..ce0ce7593 100644 --- a/SpiffWorkflow/task.py +++ b/SpiffWorkflow/task.py @@ -403,14 +403,6 @@ class Task(object, metaclass=DeprecatedMetaTask): def __iter__(self): return Task.Iterator(self) - def 
__setstate__(self, dict): - self.__dict__.update(dict) - # If unpickled in the same Python process in which a workflow - # (Task) is built through the API, we need to make sure - # that there will not be any ID collisions. - if dict['thread_id'] >= self.__class__.thread_id_pool: - self.__class__.thread_id_pool = dict['thread_id'] - def _get_root(self): """ Returns the top level parent. @@ -752,10 +744,9 @@ class Task(object, metaclass=DeprecatedMetaTask): has changed (e.g. from FUTURE to COMPLETED.) """ self._set_state(TaskState.COMPLETED) - # WHY on earth do we mark the task completed and THEN attempt to execute it. - # A sane model would have success and failure states and instead we return - # a boolean, with no systematic way of dealing with failures. This is just - # crazy! + # I am taking back my previous comment about running the task after it's completed being "CRAZY" + # Turns out that tasks are in fact supposed to be complete at this point and I've been wrong all along + # about when tasks should actually be executed start = time.time() retval = self.task_spec._on_complete(self) extra = self.log_info({ diff --git a/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py b/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py index 9ebade906..c0b3cc790 100644 --- a/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py +++ b/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py @@ -1,6 +1,6 @@ import unittest -from SpiffWorkflow.bpmn.PythonScriptEngine import Box +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box class BoxDeepCopyTest(unittest.TestCase): diff --git a/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py b/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py index 9f90268da..2623e6887 100644 --- a/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py +++ b/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py @@ -7,7 +7,7 @@ from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser from SpiffWorkflow.bpmn.parser.task_parsers import ConditionalGatewayParser from SpiffWorkflow.bpmn.parser.util import full_tag -from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter +from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter # Many of our tests relied on the Packager to set the calledElement attribute on # Call Activities. I've moved that code to a customized parser. 
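The test changes below show how registering a test-only converter works under the new API: instead of passing an override list to configure_workflow_spec_converter, the converter class is appended to the spec config. A sketch mirroring the BpmnWorkflowTestCase hunk that follows, with the test import written absolute for clarity:

    from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG
    from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter

    # Register the test converter in the config, then build the spec
    # converter from that config.
    DEFAULT_SPEC_CONFIG['task_specs'].append(TestUserTaskConverter)
    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=DEFAULT_SPEC_CONFIG)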
@@ -35,9 +35,6 @@ class TestUserTask(UserTask): task.set_data(choice=choice) task.complete() - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_generic(wf_spec, s_state, TestUserTask) class TestExclusiveGatewayParser(ConditionalGatewayParser): @@ -47,7 +44,7 @@ class TestExclusiveGatewayParser(ConditionalGatewayParser): return cond return "choice == '%s'" % sequence_flow_node.get('name', None) -class TestUserTaskConverter(BpmnTaskSpecConverter): +class TestUserTaskConverter(TaskSpecConverter): def __init__(self, data_converter=None): super().__init__(TestUserTask, data_converter) diff --git a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py index 8f2f0af53..ba564abc8 100644 --- a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py +++ b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py @@ -7,13 +7,16 @@ from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator from SpiffWorkflow.task import TaskState -from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer +from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG +from SpiffWorkflow.bpmn.serializer.task_spec import UserTaskConverter from .BpmnLoaderForTests import TestUserTaskConverter, TestBpmnParser __author__ = 'matth' +DEFAULT_SPEC_CONFIG['task_specs'].append(TestUserTaskConverter) -wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter]) + +wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=DEFAULT_SPEC_CONFIG) class BpmnWorkflowTestCase(unittest.TestCase): diff --git a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py index 8cbca47f5..d2b218862 100644 --- a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py +++ b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py @@ -4,6 +4,7 @@ import unittest from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase @@ -17,8 +18,8 @@ class CustomBpmnScriptEngine(PythonScriptEngine): It will execute python code read in from the bpmn. It will also make any scripts in the scripts directory available for execution. 
""" def __init__(self): - augment_methods = {'custom_function': my_custom_function} - super().__init__(scripting_additions=augment_methods) + environment = TaskDataEnvironment({'custom_function': my_custom_function}) + super().__init__(environment=environment) class CustomInlineScriptTest(BpmnWorkflowTestCase): diff --git a/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py b/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py index 6fe07dece..474e988db 100644 --- a/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py +++ b/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py @@ -3,6 +3,7 @@ import unittest from SpiffWorkflow.bpmn.FeelLikeScriptEngine import FeelLikeScriptEngine, FeelInterval +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase import datetime @@ -12,7 +13,7 @@ __author__ = 'matth' class FeelExpressionTest(BpmnWorkflowTestCase): def setUp(self): - self.expressionEngine = FeelLikeScriptEngine() + self.expressionEngine = FeelLikeScriptEngine(environment=BoxedTaskDataEnvironment()) def testRunThroughExpressions(self): tests = [("string length('abcd')", 4, {}), @@ -62,7 +63,7 @@ class FeelExpressionTest(BpmnWorkflowTestCase): ] } x = self.expressionEngine._evaluate( - """sum([1 for x in exclusive if x.get('ExclusiveSpaceAMComputingID',None)==None])""", + """sum([1 for x in exclusive if x.get('ExclusiveSpaceAMComputingID',None)==None])""", data ) self.assertEqual(x, 1) diff --git a/tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py b/tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py deleted file mode 100644 index 4d4100258..000000000 --- a/tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- - - - -import unittest -from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase - -__author__ = 'kellym' - - -class NavListExclusiveGatewayTest(BpmnWorkflowTestCase): - """The example bpmn diagram looks roughly like this, a gateway - that leads to two different end points - - [Step 1] -> - -> 'False' -> [Alternate End] -> END A - -> 'True' -> [Step 2] -> END B - """ - - def setUp(self): - self.spec = self.load_workflow1_spec() - - def load_workflow1_spec(self): - return self.load_workflow_spec('ExclusiveGatewayMultipleEndNavigation.bpmn','ExclusiveGatewayMultipleEndNavigation') - - def testRunThroughHappy(self): - - self.workflow = BpmnWorkflow(self.spec) - self.workflow.do_engine_steps() - nav_list = self.workflow.get_nav_list() - self.assertEqual(6, len(nav_list)) - - self.assertEqual("Step 1", nav_list[0]["description"]) - self.assertEqual("GatewayToEnd", nav_list[1]["description"]) - self.assertEqual("False", nav_list[2]["description"]) - self.assertEqual("Step End", nav_list[3]["description"]) - self.assertEqual("True", nav_list[4]["description"]) - self.assertEqual("Step 2", nav_list[5]["description"]) - - self.assertEqual(0, nav_list[0]["indent"]) - - -def suite(): - return unittest.TestLoader().loadTestsFromTestCase(NavListExclusiveGatewayTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py new file mode 100644 index 000000000..cb9c40c07 --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py @@ -0,0 +1,80 @@ +import json + +from 
tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase +from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment +from SpiffWorkflow.bpmn.workflow import BpmnWorkflow +from SpiffWorkflow.task import TaskState + +def example_global(): + pass + +class NonTaskDataExampleEnvironment(BasePythonScriptEngineEnvironment): + def __init__(self, environment_globals, environment): + self.environment = environment + self.environment.update(environment_globals) + super().__init__(environment_globals) + + def evaluate(self, expression, context, external_methods=None): + pass + + def execute(self, script, context, external_methods=None): + self.environment.update(context) + self.environment.update(external_methods or {}) + exec(script, self.environment) + self.environment = {k: v for k, v in self.environment.items() if k not in external_methods} + + def user_defined_values(self): + return {k: v for k, v in self.environment.items() if k not in self.globals} + +class PythonScriptEngineEnvironmentTest(BpmnWorkflowTestCase): + + def setUp(self): + spec, subprocesses = self.load_workflow_spec('task_data_size.bpmn', 'Process_ccz6oq2') + self.workflow = BpmnWorkflow(spec, subprocesses) + + def testTaskDataSizeWithDefaultPythonScriptEngine(self): + self.workflow.do_engine_steps() + + self.assertIn("a", self.workflow.data) + self.assertIn("b", self.workflow.data) + self.assertIn("c", self.workflow.data) + self.assertIn("d", self.workflow.data) + + task_data_len = self._get_task_data_len() + d_uniques = set(self.workflow.data["d"]) + d_len = len(self.workflow.data["d"]) + + self.assertGreater(task_data_len, 15000) + self.assertEqual(d_len, 512*3) + self.assertEqual(d_uniques, {"a", "b", "c"}) + + def testTaskDataSizeWithNonTaskDataEnvironmentBasedPythonScriptEngine(self): + script_engine_environment = NonTaskDataExampleEnvironment({"example_global": example_global}, {}) + script_engine = PythonScriptEngine(environment=script_engine_environment) + self.workflow.script_engine = script_engine + + self.workflow.do_engine_steps() + self.workflow.data.update(script_engine.environment.user_defined_values()) + + self.assertIn("a", self.workflow.data) + self.assertIn("b", self.workflow.data) + self.assertIn("c", self.workflow.data) + self.assertIn("d", self.workflow.data) + self.assertNotIn("example_global", self.workflow.data) + + task_data_len = self._get_task_data_len() + d_uniques = set(self.workflow.data["d"]) + d_len = len(self.workflow.data["d"]) + + self.assertEqual(task_data_len, 2) + self.assertEqual(d_len, 512*3) + self.assertEqual(d_uniques, {"a", "b", "c"}) + + def _get_task_data_len(self): + tasks_to_check = self.workflow.get_tasks(TaskState.FINISHED_MASK) + task_data = [task.data for task in tasks_to_check] + task_data_to_check = list(filter(len, task_data)) + task_data_len = len(json.dumps(task_data_to_check)) + return task_data_len + diff --git a/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py b/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py index 072d93757..0b3d56033 100644 --- a/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py +++ b/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py @@ -4,6 +4,7 @@ import datetime import unittest from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import 
BpmnWorkflowTestCase @@ -14,10 +15,10 @@ class CustomScriptEngine(PythonScriptEngine): It will execute python code read in from the bpmn. It will also make any scripts in the scripts directory available for execution. """ def __init__(self): - augment_methods = { + environment = TaskDataEnvironment({ 'timedelta': datetime.timedelta, - } - super().__init__(scripting_additions=augment_methods) + }) + super().__init__(environment=environment) class TooManyLoopsTest(BpmnWorkflowTestCase): diff --git a/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn b/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn deleted file mode 100644 index 87b743e07..000000000 --- a/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn +++ /dev/null @@ -1,746 +0,0 @@ - - - - - Flow_0kcrx5l - - - - Flow_0kcrx5l - Flow_1seuuie - - - The Study's Responsible Organization is needed in order to confirm the Department Chair. If it is the same as the Primary Investigator's Primary Department show below, we have all the information needed to determine the Department Chair. - - -**Primary Investigator's Primary Appointment** -***School:*** {{ pi.E0.schoolName }} -***Department:*** {{ pi.E0.deptName }} - - - - - - - - - - - - - Flow_12obxbo - Flow_1y4gjsg - - - Flow_02614fd - Flow_0c4tt8e - ro.chair = {} -ro.chair.uid = RO_Chair_CID -ro.chair.name_degree = RO_Chair_Name_Degree -ro.chair.title = RO_Chair_Title -ro.chair.sig_block = RO_Chair_Sig_Block - - - Flow_1seuuie - Flow_1ni06mz - Flow_1y9edqt - - - Flow_1y9edqt - Flow_1oriwwz - Flow_185jvp3 - - - Flow_185jvp3 - Flow_1dh8c45 - sch_enum = [] -if pi.E0.schoolAbbrv != "MD": - sch_enum_md = [ - { - "value": "MD", - "label": "Medicine" - }, - ] -else: - sch_enum_md = [] -if pi.E0.schoolAbbrv != "AS": - sch_enum_as = [ - { - "value": "AS", - "label": "Arts & Science" - }, - ] -else: - sch_enum_as = [] -if pi.E0.schoolAbbrv != "CU": - sch_enum_cu = [ - { - "value": "CU", - "label": "Education" - }, - ] -else: - sch_enum_cu = [] -if pi.E0.schoolAbbrv != "NR": - sch_enum_nr = [ - { - "value": "NR", - "label": "Nursing" - }, - ] -else: - sch_enum_nr = [] -sch_enum = sch_enum_md + sch_enum_as + sch_enum_cu + sch_enum_nr -del(sch_enum_md) -del(sch_enum_as) -del(sch_enum_cu) -del(sch_enum_nr) - - - - - - - - - - - - - - Flow_1dh8c45 - Flow_0mf9npl - - - Flow_1oriwwz - Flow_0nmpxmc - Flow_12obxbo - Flow_03s8gvx - Flow_0nzochy - Flow_0h955ao - - - Flow_1y4gjsg - Flow_0lnb8jw - Flow_1fqtd41 - Flow_0a626ba - - - Flow_0a626ba - Flow_0ssrpqx - if PIsPrimaryDepartmentSameAsRO.value == "diffSchool": - ro.schoolName = RO_StudySchool.label - ro.schoolAbbrv = RO_StudySchool.value - -if PIsPrimaryDepartmentSameAsRO.value != "yes": - if ro.schoolAbbrv == "MD": - ro.deptName = RO_StudyDeptMedicine.label - ro.deptAbbrv = RO_StudyDeptMedicine.value - elif ro.schoolAbbrv == "AS": - ro.deptName = RO_StudyDeptArtsSciences.label - ro.deptAbbrv = RO_StudyDeptArtsSciences.value - elif ro.schoolAbbrv == "CU": - ro.deptName = RO_StudyDeptEducation.label - ro.deptAbbrv = RO_StudyDeptEducation.value - else: - ro.deptName = "" - ro.deptAbbrv = "" - - - The Study's Responsible Organization is needed in order to confirm the Department Chair. If it is the same as the Primary Investigator's Primary Department show below, we have all the information needed to determine the Department Chair. 
diff --git a/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn b/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn
deleted file mode 100644
index 87b743e07..000000000
--- a/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn
+++ /dev/null
@@ -1,746 +0,0 @@
[746 removed lines of BPMN XML omitted: a study-approval navigation process (PI appointment, responsible organization, and department chair confirmation forms and scripts); the markup did not survive this extract.]
diff --git a/tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn b/tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn
deleted file mode 100644
index 28c4a5538..000000000
--- a/tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn
+++ /dev/null
@@ -1,143 +0,0 @@
[143 removed lines of BPMN XML omitted: an exclusive-gateway process with multiple end events used for navigation tests; the markup did not survive this extract.]
diff --git a/tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn b/tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn
deleted file mode 100644
index 6a92338e8..000000000
--- a/tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn
+++ /dev/null
@@ -1,1209 +0,0 @@
[1209 removed lines of BPMN XML omitted: a long navigation fixture that gathers investigators from Protocol Builder, validates computing IDs, and confirms the responsible organization and department chair; the markup did not survive this extract.]
diff --git a/tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn b/tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn
deleted file mode 100644
index 2a8793e98..000000000
--- a/tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn
+++ /dev/null
@@ -1,39 +0,0 @@
[39 removed lines of BPMN XML omitted: a small subprocess fixture with a single script task, print('complicated common task'); the markup did not survive this extract.]

diff --git a/tests/SpiffWorkflow/bpmn/data/rrt.bpmn b/tests/SpiffWorkflow/bpmn/data/rrt.bpmn
deleted file mode 100644
index e6d1afb27..000000000
--- a/tests/SpiffWorkflow/bpmn/data/rrt.bpmn
+++ /dev/null
@@ -1,336 +0,0 @@
[336 removed lines of BPMN XML omitted: the UVA "Research Ramp-up Plan" intake flow, ending in a CompleteTemplate ResearchRecoveryPlan.docx RESEARCH_RECOVERY script task; the markup did not survive this extract.]
diff --git a/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json b/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json
index f580929ad..39307bd3d 100644
--- a/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json
+++ b/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json
@@ -142,7 +142,7 @@
           "typename":"SequenceFlow"
         }
       },
-      "typename":"TestUserTask",
+      "typename":"UserTask",
       "extensions":{}
     },
     "sid-C014B4B9-889F-4EE9-9949-C89502C35CF0":{
@@ -697,7 +697,7 @@
           "typename":"SequenceFlow"
         }
       },
-      "typename":"TestUserTask",
+      "typename":"UserTask",
       "extensions":{}
     },
     "sid-2EDAD784-7F15-486C-B805-D26EE25F8087":{
@@ -906,7 +906,7 @@
           "typename":"SequenceFlow"
         }
       },
-      "typename":"TestUserTask",
+      "typename":"UserTask",
       "extensions":{}
     },
     "sid-BC014079-199F-4720-95CD-244B0ACB6DE1":{
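The fixture edit above only renames serialized typenames, since the test-only TestUserTask converter is gone and the stock UserTask converter is used instead. If an old serialization had to be migrated outside the test suite, the rewrite would amount to something like the following (the traversal path here is illustrative and the helper is hypothetical, not taken from the fixture):

    import json

    def retarget_test_user_tasks(serialized: str) -> str:
        # Hypothetical helper: walk every serialized task spec and rename
        # the test-only typename to the standard one.
        doc = json.loads(serialized)
        for spec in doc.get("spec", {}).get("task_specs", {}).values():
            if spec.get("typename") == "TestUserTask":
                spec["typename"] = "UserTask"
        return json.dumps(doc)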
diff --git a/tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn b/tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn
new file mode 100644
index 000000000..22a39425a
--- /dev/null
+++ b/tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn
@@ -0,0 +1,81 @@
[81 added lines of BPMN XML omitted (the markup did not survive this extract): the process chains four script tasks between a start and an end event, running a="a"*512, then b="b"*512, then c="c"*512, then d=a+b+c, followed by diagram layout elements.]
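A quick sanity check on the numbers asserted in PythonScriptEngineEnvironmentTest, computed directly from the scripts in this fixture:

    a = "a" * 512
    b = "b" * 512
    c = "c" * 512
    d = a + b + c
    assert len(d) == 512 * 3           # matches the d_len assertion
    assert set(d) == {"a", "b", "c"}   # matches the d_uniques assertion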
diff --git a/tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn b/tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn
deleted file mode 100644
index 8b6acb97d..000000000
--- a/tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn
+++ /dev/null
@@ -1,77 +0,0 @@
[77 removed lines of BPMN XML omitted: a boundary-timer fixture, timedelta(milliseconds=2), that sets timer_called = True when the timer fires; the markup did not survive this extract.]
diff --git a/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py b/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py
index 6e5497842..29febd248 100644
--- a/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py
+++ b/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py
@@ -2,6 +2,7 @@ from datetime import timedelta
 
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
 from SpiffWorkflow.task import TaskState
@@ -11,7 +12,7 @@ class
EventBsedGatewayTest(BpmnWorkflowTestCase): def setUp(self): self.spec, self.subprocesses = self.load_workflow_spec('event-gateway.bpmn', 'Process_0pvx19v') - self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta}) + self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta})) self.workflow = BpmnWorkflow(self.spec, script_engine=self.script_engine) def testEventBasedGateway(self): @@ -29,8 +30,8 @@ class EventBsedGatewayTest(BpmnWorkflowTestCase): self.workflow.script_engine = self.script_engine self.assertEqual(len(waiting_tasks), 1) self.workflow.catch(MessageEventDefinition('message_1')) - self.workflow.refresh_waiting_tasks() self.workflow.do_engine_steps() + self.workflow.refresh_waiting_tasks() self.assertEqual(self.workflow.is_completed(), True) self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.COMPLETED) self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED) diff --git a/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py b/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py index bf89912c2..58e100ea0 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py @@ -5,6 +5,7 @@ import unittest import time from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase @@ -24,11 +25,11 @@ class CustomScriptEngine(PythonScriptEngine): It will execute python code read in from the bpmn. It will also make any scripts in the scripts directory available for execution. """ def __init__(self): - augment_methods = { + environment = TaskDataEnvironment({ 'custom_function': my_custom_function, 'timedelta': datetime.timedelta, - } - super().__init__(scripting_additions=augment_methods) + }) + super().__init__(environment=environment) class TimerCycleStartTest(BpmnWorkflowTestCase): diff --git a/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py b/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py index 5c61f3818..452e71ed8 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerCycleTest.py @@ -5,6 +5,7 @@ import unittest import time from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase @@ -22,11 +23,11 @@ class CustomScriptEngine(PythonScriptEngine): It will execute python code read in from the bpmn. It will also make any scripts in the scripts directory available for execution. 
""" def __init__(self): - augment_methods = { + environment = TaskDataEnvironment({ 'custom_function': my_custom_function, 'timedelta': datetime.timedelta, - } - super().__init__(scripting_additions=augment_methods) + }) + super().__init__(environment=environment) diff --git a/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py b/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py index deebd7754..e56cc3936 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py @@ -6,6 +6,7 @@ import time from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -14,10 +15,10 @@ __author__ = 'kellym' class TimerDateTest(BpmnWorkflowTestCase): def setUp(self): - self.script_engine = PythonScriptEngine(default_globals={ + self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({ "datetime": datetime.datetime, "timedelta": datetime.timedelta, - }) + })) self.spec, self.subprocesses = self.load_workflow_spec('timer-date-start.bpmn', 'date_timer') self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine) diff --git a/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py b/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py index aff5d4299..1cd2c17b8 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py @@ -6,6 +6,7 @@ from datetime import timedelta from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -13,7 +14,7 @@ __author__ = 'kellym' class TimerDurationTest(BpmnWorkflowTestCase): def setUp(self): - self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta}) + self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta})) self.spec, self.subprocesses = self.load_workflow_spec('boundary_timer_on_task.bpmn', 'test_timer') self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine) diff --git a/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py b/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py index c8e72fcd4..18cbd12d2 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py @@ -5,6 +5,7 @@ import time from datetime import datetime, timedelta from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -13,7 +14,7 @@ __author__ = 'kellym' class TimerDurationTest(BpmnWorkflowTestCase): def setUp(self): - self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta}) + self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta})) self.spec, self.subprocesses = self.load_workflow_spec('timer.bpmn', 'timer') self.workflow = 
BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine) diff --git a/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py b/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py index e392e5dba..5fa99e92c 100644 --- a/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py +++ b/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py @@ -4,7 +4,6 @@ import os from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer -from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter class BaseTestCase(unittest.TestCase): @@ -21,7 +20,7 @@ class BaseTestCase(unittest.TestCase): def setUp(self): super(BaseTestCase, self).setUp() - wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter]) + wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter() self.serializer = BpmnWorkflowSerializer(wf_spec_converter, version=self.SERIALIZER_VERSION) spec, subprocesses = self.load_workflow_spec('random_fact.bpmn', 'random_fact') self.workflow = BpmnWorkflow(spec, subprocesses) diff --git a/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py b/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py index ed547952e..88612867f 100644 --- a/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py +++ b/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py @@ -5,7 +5,6 @@ import json from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter from .BaseTestCase import BaseTestCase @@ -71,7 +70,7 @@ class BpmnWorkflowSerializerTest(BaseTestCase): try: self.assertRaises(TypeError, self.serializer.serialize_json, self.workflow) - wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter]) + wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter() custom_serializer = BpmnWorkflowSerializer(wf_spec_converter, version=self.SERIALIZER_VERSION,json_encoder_cls=MyJsonEncoder, json_decoder_cls=MyJsonDecoder) serialized_workflow = custom_serializer.serialize_json(self.workflow) finally: diff --git a/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py b/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py index cd38b5f82..cae051baf 100644 --- a/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py +++ b/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py @@ -3,9 +3,11 @@ import time from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from .BaseTestCase import BaseTestCase + class VersionMigrationTest(BaseTestCase): SERIALIZER_VERSION = "1.2" @@ -24,7 +26,7 @@ class VersionMigrationTest(BaseTestCase): def test_convert_1_1_to_1_2(self): fn = os.path.join(self.DATA_DIR, 'serialization', 'v1-1.json') wf = self.serializer.deserialize_json(open(fn).read()) - wf.script_engine = PythonScriptEngine(default_globals={"time": time}) + wf.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"time": time})) wf.refresh_waiting_tasks() wf.do_engine_steps() - self.assertTrue(wf.is_completed()) \ No newline at end of file + 
self.assertTrue(wf.is_completed()) diff --git a/tests/SpiffWorkflow/camunda/BaseTestCase.py b/tests/SpiffWorkflow/camunda/BaseTestCase.py index 67d9c590a..8cdde5629 100644 --- a/tests/SpiffWorkflow/camunda/BaseTestCase.py +++ b/tests/SpiffWorkflow/camunda/BaseTestCase.py @@ -1,22 +1,20 @@ # -*- coding: utf-8 -*- import os +from copy import deepcopy from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser -from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter, StartEventConverter, EndEventConverter, \ - IntermediateCatchEventConverter, IntermediateThrowEventConverter, BoundaryEventConverter +from SpiffWorkflow.camunda.serializer.config import CAMUNDA_SPEC_CONFIG -from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter +from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase +CAMUNDA_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter) __author__ = 'danfunk' -wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([ - UserTaskConverter, BusinessRuleTaskConverter, StartEventConverter, - EndEventConverter, BoundaryEventConverter, IntermediateCatchEventConverter, - IntermediateThrowEventConverter]) +wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(CAMUNDA_SPEC_CONFIG) class BaseTestCase(BpmnWorkflowTestCase): """ Provides some basic tools for loading up and parsing camunda BPMN files """ diff --git a/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py b/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py index 400f60117..a7d9d6c54 100644 --- a/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py +++ b/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py @@ -16,7 +16,7 @@ class CallActivityMessageTest(BaseTestCase): def testRunThroughHappy(self): self.actual_test(save_restore=False) - def testThroughSaveRestore(self): + def testRunThroughSaveRestore(self): self.actual_test(save_restore=True) def actual_test(self, save_restore=False): diff --git a/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py b/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py index cde4662c0..23d256346 100644 --- a/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py +++ b/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py @@ -1,5 +1,6 @@ import unittest from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from .BaseTestCase import BaseTestCase @@ -12,8 +13,8 @@ def my_custom_function(txt): class CustomScriptEngine(PythonScriptEngine): def __init__(self): - augment_methods = {'my_custom_function': my_custom_function} - super().__init__(scripting_additions=augment_methods) + environment = TaskDataEnvironment({'my_custom_function': my_custom_function}) + super().__init__(environment=environment) class DMNCustomScriptTest(BaseTestCase): diff --git a/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py b/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py index ce830b3a8..8c0bf3c9c 100644 --- a/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py +++ b/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py @@ -7,6 +7,7 @@ from datetime import timedelta from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from 
SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 from .BaseTestCase import BaseTestCase
 
 __author__ = 'kellym'
@@ -15,7 +16,7 @@ class MessageBoundaryTest(BaseTestCase):
 
     def setUp(self):
-        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
+        self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))
         self.spec, self.subprocesses = self.load_workflow_spec('MessageBoundary.bpmn', 'Process_1kjyavs')
         self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

diff --git a/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py b/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py
index c381d5b6f..e31aa083b 100644
--- a/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py
+++ b/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py
@@ -1,6 +1,8 @@
 import unittest
 
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment
 
 from .BaseTestCase import BaseTestCase
@@ -10,12 +12,13 @@ class MultiInstanceDMNTest(BaseTestCase):
         self.spec, subprocesses = self.load_workflow_spec(
             'DMNMultiInstance.bpmn', 'Process_1', 'test_integer_decision_multi.dmn')
         self.workflow = BpmnWorkflow(self.spec)
+        self.script_engine = PythonScriptEngine(environment=BoxedTaskDataEnvironment())
+        self.workflow.script_engine = self.script_engine
 
     def testConstructor(self):
         pass  # this is accomplished through setup.
 
     def testDmnHappy(self):
-        self.workflow = BpmnWorkflow(self.spec)
         self.workflow.do_engine_steps()
         self.workflow.complete_next()
         self.workflow.do_engine_steps()
@@ -25,16 +28,19 @@
 
     def testDmnSaveRestore(self):
-        self.workflow = BpmnWorkflow(self.spec)
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.workflow.do_engine_steps()
         self.workflow.complete_next()
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.workflow.do_engine_steps()
         self.workflow.complete_next()
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.workflow.do_engine_steps()
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.assertEqual(self.workflow.data['stuff']['E']['y'], 'D')

diff --git a/tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn b/tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn
deleted file mode 100644
index 4acb9f8e3..000000000
--- a/tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn
+++ /dev/null
@@ -1,94 +0,0 @@
[94 removed lines of BPMN XML omitted: an exclusive-gateway loop driven by a "morestuff" form field; the markup did not survive this extract.]

diff --git a/tests/SpiffWorkflow/camunda/data/random_fact.svg b/tests/SpiffWorkflow/camunda/data/random_fact.svg
deleted file mode 100644
index 3078ea0e3..000000000
--- a/tests/SpiffWorkflow/camunda/data/random_fact.svg
+++ /dev/null
@@ -1,4 +0,0 @@
[4 removed lines of SVG omitted: a diagram export of the random-fact process ("Set Type" / "Display Fact"); the markup did not survive this extract.]
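The extra self.workflow.script_engine assignments above follow from how serialization works: a restored workflow comes back with the default script engine, so a custom engine has to be reattached after every round trip. A minimal sketch, assuming a BpmnWorkflowSerializer like the one configured in BaseTestCase and an existing BpmnWorkflow named workflow:

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment

    script_engine = PythonScriptEngine(environment=BoxedTaskDataEnvironment())
    workflow.script_engine = script_engine
    serialized = serializer.serialize_json(workflow)
    workflow = serializer.deserialize_json(serialized)
    workflow.script_engine = script_engine   # must be reattached after restore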
diff --git a/tests/SpiffWorkflow/camunda/data/top_workflow.bpmn b/tests/SpiffWorkflow/camunda/data/top_workflow.bpmn
deleted file mode 100644
index c36573741..000000000
--- a/tests/SpiffWorkflow/camunda/data/top_workflow.bpmn
+++ /dev/null
@@ -1,64 +0,0 @@
[64 removed lines of BPMN XML omitted: a top-level workflow that calls my_custom_function twice around a subprocess; the markup did not survive this extract.]

diff --git a/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py b/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
index 3de8fa2a6..b24f1c373 100644
--- a/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
+++ b/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
@@ -1,7 +1,8 @@
 import unittest
 
 from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, EnumFormField
-from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
+from SpiffWorkflow.camunda.serializer.task_spec import UserTaskConverter
+from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter
 from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
@@ -53,7 +54,7 @@ class UserTaskSpecTest(unittest.TestCase):
         self.form.add_field(field1)
         self.form.add_field(field2)
 
-        converter = UserTaskConverter()
+        converter = UserTaskConverter(DictionaryConverter())
         dct = converter.to_dict(self.user_spec)
         self.assertEqual(dct['name'], 'userTask')
         self.assertEqual(dct['form'], {

diff --git a/tests/SpiffWorkflow/dmn/DecisionRunner.py b/tests/SpiffWorkflow/dmn/DecisionRunner.py
index 133f12922..efb9d89bf 100644
--- a/tests/SpiffWorkflow/dmn/DecisionRunner.py
+++ b/tests/SpiffWorkflow/dmn/DecisionRunner.py
@@ -2,7 +2,7 @@ import os
 
 from lxml import etree
 
-from SpiffWorkflow.bpmn.PythonScriptEngine import Box
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
 from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
 from SpiffWorkflow.dmn.parser.DMNParser import DMNParser, get_dmn_ns

diff --git a/tests/SpiffWorkflow/dmn/HitPolicyTest.py b/tests/SpiffWorkflow/dmn/HitPolicyTest.py
index 061ba660e..898aeeb3a 100644
--- a/tests/SpiffWorkflow/dmn/HitPolicyTest.py
+++ b/tests/SpiffWorkflow/dmn/HitPolicyTest.py
@@ -1,14 +1,11 @@
 import os
 import unittest
 
-from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
+from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
-from SpiffWorkflow.dmn.serializer.task_spec_converters import \
-    BusinessRuleTaskConverter
+from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
-from tests.SpiffWorkflow.dmn.DecisionRunner import DecisionRunner
-from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import \
-    PythonDecisionRunner
+from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import PythonDecisionRunner
 
 class HitPolicyTest(BpmnWorkflowTestCase):
@@ -38,8 +35,8 @@
         runner = PythonDecisionRunner(file_name)
         decision_table = runner.decision_table
         self.assertEqual("COLLECT", decision_table.hit_policy)
-        dict = BusinessRuleTaskConverter().decision_table_to_dict(decision_table)
-        new_table = BusinessRuleTaskConverter().decision_table_from_dict(dict)
+        dict =
BusinessRuleTaskConverter(DictionaryConverter()).decision_table_to_dict(decision_table) + new_table = BusinessRuleTaskConverter(DictionaryConverter()).decision_table_from_dict(dict) self.assertEqual("COLLECT", new_table.hit_policy) def suite(): diff --git a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py index 1ab727f9d..95c539c8e 100644 --- a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py +++ b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py @@ -1,6 +1,6 @@ import unittest -from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box from .FeelDecisionRunner import FeelDecisionRunner @@ -19,7 +19,7 @@ class FeelDictDecisionTestClass(unittest.TestCase): "PEANUTS": {"delicious": True}, "SPAM": {"delicious": False} }} - PythonScriptEngine.convert_to_box(PythonScriptEngine(), data) + Box.convert_to_box(data) res = self.runner.decide(data) self.assertEqual(res.description, 'They are allergic to peanuts') diff --git a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py index bf19b44e5..6978fa9fe 100644 --- a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py +++ b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py @@ -1,6 +1,6 @@ import unittest -from SpiffWorkflow.bpmn.PythonScriptEngine import Box +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box from .FeelDecisionRunner import FeelDecisionRunner diff --git a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py index 375b9fd16..a9e9e2d5b 100644 --- a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py +++ b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py @@ -1,6 +1,6 @@ import unittest -from SpiffWorkflow.bpmn.PythonScriptEngine import Box +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box from .PythonDecisionRunner import PythonDecisionRunner diff --git a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py index d81acb472..df569ccd8 100644 --- a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py +++ b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py @@ -1,6 +1,6 @@ import unittest -from SpiffWorkflow.bpmn.PythonScriptEngine import Box +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box from .PythonDecisionRunner import PythonDecisionRunner diff --git a/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py b/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py index c3ef77ce4..e2e753c69 100644 --- a/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py +++ b/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py @@ -2,11 +2,12 @@ import datetime from decimal import Decimal from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from ..DecisionRunner import DecisionRunner class PythonDecisionRunner(DecisionRunner): def __init__(self, filename): - scripting_additions={'Decimal': Decimal, 'datetime': datetime} - 
super().__init__(PythonScriptEngine(scripting_additions=scripting_additions), filename, 'python_engine') + environment = TaskDataEnvironment({'Decimal': Decimal, 'datetime': datetime}) + super().__init__(PythonScriptEngine(environment=environment), filename, 'python_engine') diff --git a/tests/SpiffWorkflow/spiff/BaseTestCase.py b/tests/SpiffWorkflow/spiff/BaseTestCase.py index b085d1f76..f1826a78d 100644 --- a/tests/SpiffWorkflow/spiff/BaseTestCase.py +++ b/tests/SpiffWorkflow/spiff/BaseTestCase.py @@ -1,27 +1,17 @@ # -*- coding: utf-8 -*- import os +from copy import deepcopy from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser, VALIDATOR -from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter, \ - ManualTaskConverter, UserTaskConverter, ScriptTaskConverter, \ - SubWorkflowTaskConverter, TransactionSubprocessConverter, \ - CallActivityTaskConverter, \ - StartEventConverter, EndEventConverter, BoundaryEventConverter, \ - SendTaskConverter, ReceiveTaskConverter, \ - IntermediateCatchEventConverter, IntermediateThrowEventConverter, \ - ServiceTaskConverter -from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter +from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG +from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase -wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([ - NoneTaskConverter, ManualTaskConverter, UserTaskConverter, ScriptTaskConverter, - SubWorkflowTaskConverter, TransactionSubprocessConverter, CallActivityTaskConverter, - StartEventConverter, EndEventConverter, BoundaryEventConverter, SendTaskConverter, ReceiveTaskConverter, - IntermediateCatchEventConverter, IntermediateThrowEventConverter, BusinessRuleTaskConverter, - ServiceTaskConverter -]) +SPIFF_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter) + +wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG) class BaseTestCase(BpmnWorkflowTestCase): """ Provides some basic tools for loading up and parsing Spiff extensions""" From 28e98b66e573557a541d4f5e508e06b84491f979 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 21:00:20 -0500 Subject: [PATCH 24/59] remove flask-bpmn --- bin/pull-subtrees | 1 - 1 file changed, 1 deletion(-) diff --git a/bin/pull-subtrees b/bin/pull-subtrees index 8002791b9..ee1e988f5 100755 --- a/bin/pull-subtrees +++ b/bin/pull-subtrees @@ -10,7 +10,6 @@ set -o errtrace -o errexit -o nounset -o pipefail for subtree in "SpiffWorkflow" \ "spiffworkflow-backend" \ "spiffworkflow-frontend" \ - "flask-bpmn" \ "bpmn-js-spiffworkflow" \ "connector-proxy-demo" do From 5735d748c245b8527d2237be9b09b81789af8619 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 21:55:26 -0500 Subject: [PATCH 25/59] import EventBasedGatewayConverter from correct package --- poetry.lock | 2 +- .../services/process_instance_processor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3608303e0..2c2711b56 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1760,7 +1760,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "1f51db962ccaed5810f5d0f7d76a932f056430ab" +resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad" [[package]] name = "sqlalchemy" diff 
--git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index b45add697..022560c6a 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -43,7 +43,7 @@ from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
 from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ignore
-from SpiffWorkflow.spiff.serializer.task_spec_converters import (  # type: ignore
+from SpiffWorkflow.bpmn.serializer.task_spec import (  # type: ignore
     EventBasedGatewayConverter,
 )
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
self._event_serializer.restore(event_data) + event_definition = self._event_serializer.registry.restore(event_data) if payload is not None: event_definition.payload = payload current_app.logger.info( From c945304b06acba512d20cd8b838e1064dab070e3 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 11:06:40 -0500 Subject: [PATCH 27/59] clean up sentry notification and avoid logger.exception when we do not want sentry --- .../exceptions/api_error.py | 38 ++++++++++++++----- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index ab5bf1c3a..46d2ad549 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -171,18 +171,30 @@ def set_user_sentry_context() -> None: set_tag("username", username) +def should_notify_sentry(exception: Exception) -> bool: + """Determine if we should notify sentry. + + We want to capture_exception to log the exception to sentry, but we don't want to log: + 1. ApiErrors that are just invalid tokens + 2. NotAuthorizedError. we usually call check-permissions before calling an API to + make sure we'll have access, but there are some cases + where it's more convenient to just make the call from the frontend and handle the 403 appropriately. + """ + if isinstance(exception, ApiError): + if exception.error_code == "invalid_token": + return False + if isinstance(exception, NotAuthorizedError): + return False + return True + + @api_error_blueprint.app_errorhandler(Exception) # type: ignore def handle_exception(exception: Exception) -> flask.wrappers.Response: """Handles unexpected exceptions.""" set_user_sentry_context() sentry_link = None - # we want to capture_exception to log the exception to sentry, but we don't want to log: - # 1. ApiErrors that are just invalid tokens - # 2. NotAuthorizedError - if ( - not isinstance(exception, ApiError) or exception.error_code != "invalid_token" - ) and not isinstance(exception, NotAuthorizedError): + if should_notify_sentry(exception): id = capture_exception(exception) if isinstance(exception, ApiError): @@ -198,10 +210,16 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" ) - # !!!NOTE!!!: do this after sentry stuff since calling logger.exception - # seems to break the sentry sdk context where we no longer get back - # an event id or send out tags like username - current_app.logger.exception(exception) + # !!!NOTE!!!: do this after sentry stuff since calling logger.exception + # seems to break the sentry sdk context where we no longer get back + # an event id or send out tags like username + current_app.logger.exception(exception) + else: + current_app.logger.error( + f"Received exception: {exception}. Since we do not want this particular" + " exception in sentry, we cannot use logger.exception, so there will be no" + " backtrace. 
see api_error.py" + ) error_code = "internal_server_error" status_code = 500 From 548c56e3580f2f986e625fdf77782d9ab6e20aae Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 12:51:57 -0500 Subject: [PATCH 28/59] make test_user_lists more complete and correct --- .../keycloak/bin/add_test_users_to_keycloak | 6 +++ .../realm_exports/sartography-realm.json | 2 +- .../keycloak/test_user_lists/sartography | 9 ++-- .../keycloak/test_user_lists/status | 45 ++++++++++++------- 4 files changed, 42 insertions(+), 20 deletions(-) diff --git a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak index 5ad11e13f..9a045ffe7 100755 --- a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak +++ b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak @@ -7,7 +7,13 @@ function error_handler() { trap 'error_handler ${LINENO} $?' ERR set -o errtrace -o errexit -o nounset -o pipefail +# you can get a list of users from the keycloak realm file like: +# grep '"email" :' keycloak/realm_exports/spiffworkflow-realm.json | awk -F : '{print $2}' | sed -E 's/ "//g' | sed -E 's/",//g' > s + +# we keep some of these in keycloak/test_user_lists +# spiffworkflow-realm.json is a mashup of the status and sartography user lists. user_file_with_one_email_per_line="${1:-}" + keycloak_realm="${2:-spiffworkflow}" if [[ -z "${1:-}" ]]; then >&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]" diff --git a/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json b/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json index 37704ea52..20c19e24d 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json @@ -547,7 +547,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, - "email" : "kevin@sartography.com", + "email" : "kb@sartography.com", "credentials" : [ { "id" : "4057e784-689d-47c0-a164-035a69e78edf", "type" : "password", diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index 4f98a51e0..b6f685b8f 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -1,8 +1,11 @@ +admin@spiffworkflow.org alex@sartography.com dan@sartography.com -kevin@sartography.com -jason@sartography.com -mike@sartography.com +daniel@sartography.com elizabeth@sartography.com +jason@sartography.com jon@sartography.com +kb@sartography.com +madhurya@sartography.com +mike@sartography.com natalia@sartography.com diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 667c4f033..cb5107478 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -1,25 +1,38 @@ +admin@spiffworkflow.org +amir@status.im +app.program.lead@status.im +core@status.im +dao.project.lead@status.im +desktop.program.lead@status.im +desktop.project.lead@status.im +fin1@status.im +fin@status.im finance.lead@status.im -legal.lead@status.im -program.lead@status.im -services.lead@status.im finance.sme@status.im -infra.sme@status.im -legal.sme@status.im -security.sme@status.im -ppg.ba@status.im -peopleops.partner@status.im -peopleops.talent@status.im +finance_user1@status.im +harmeet@status.im infra.program-lead@status.im 
infra.project-lead@status.im -dao.project.lead@status.im -desktop.project.lead@status.im -app.program.lead@status.im -desktop.program.lead@status.im -legal.program-lead.sme@status.im -legal.project-lead.sme@status.im -legal1.sme@status.im +infra.sme@status.im infra1.sme@status.im infra2.sme@status.im +jakub@status.im +jarrad@status.im +lead1@status.im +lead@status.im +legal.lead@status.im +legal.program-lead.sme@status.im +legal.project-lead.sme@status.im +legal.sme@status.im +legal1.sme@status.im +manuchehr@status.im +peopleops.partner@status.im +peopleops.talent@status.im +ppg.ba@status.im +program.lead@status.im +sasha@status.im security.program-lead.sme@status.im security.project-lead.sme@status.im +security.sme@status.im security1.sme@status.im +services.lead@status.im From af89d3896328b2d183fbfa421ac0e7158a0a7a1a Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 13:02:50 -0500 Subject: [PATCH 29/59] remove service accounts, formalize j, add madhurya --- .../keycloak/bin/export_keycloak_realms | 3 + .../realm_exports/spiffworkflow-realm.json | 106 ++++++++---------- .../keycloak/test_user_lists/sartography | 2 + 3 files changed, 50 insertions(+), 61 deletions(-) diff --git a/spiffworkflow-backend/keycloak/bin/export_keycloak_realms b/spiffworkflow-backend/keycloak/bin/export_keycloak_realms index f205d0d7d..7e55ae6fd 100755 --- a/spiffworkflow-backend/keycloak/bin/export_keycloak_realms +++ b/spiffworkflow-backend/keycloak/bin/export_keycloak_realms @@ -21,6 +21,9 @@ docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_pa docker cp "keycloak:${docker_container_path}" "$local_tmp_dir" for realm in $realms ; do + if ! grep -Eq '\-realm$' <<< "$realm"; then + realm="${realm}-realm" + fi cp "${local_tmp_dir}/hey/${realm}.json" "${script_dir}/../realm_exports/" done diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 634caef71..c81e57ad6 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -903,7 +903,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", - "email" : "j@status.im", + "email" : "j@sartography.com", "credentials" : [ { "id" : "e71ec785-9133-4b7d-8015-1978379af0bb", "type" : "password", @@ -1163,6 +1163,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "99ce8a54-2941-4767-8ddf-52320b3708bd", + "createdTimestamp" : 1675447085191, + "username" : "madhurya", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "madhurya@sartography.com", + "credentials" : [ { + "id" : "4fa2bf1f-188e-42e3-9633-01d436864206", + "type" : "password", + "createdDate" : 1675447085252, + "secretData" : "{\"value\":\"6ZApQ7kx4YDc5ojW9eyFiSKMz5l3/Zl5PIScHEW1gtP3lrnnWqWgwcP+8cWkKdm3im+XrZwDQHjuGjGN5Rbjyw==\",\"salt\":\"HT3fCh245v8etRFIprXsyw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "6f5bfa09-7494-4a2f-b871-cf327048cac7", "createdTimestamp" : 1665517010600, @@ -1405,42 +1425,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : 
"487d3a85-89dd-4839-957a-c3f6d70551f6", - "createdTimestamp" : 1657115173081, - "username" : "service-account-spiffworkflow-backend", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "service-account@status.im", - "serviceAccountClientId" : "spiffworkflow-backend", - "credentials" : [ ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "clientRoles" : { - "spiffworkflow-backend" : [ "uma_protection" ] - }, - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "22de68b1-4b06-4bc2-8da6-0c577e7e62ad", - "createdTimestamp" : 1657055472800, - "username" : "service-account-withauth", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "service-account-withauth@status.im", - "serviceAccountClientId" : "withAuth", - "credentials" : [ ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "clientRoles" : { - "withAuth" : [ "uma_protection" ] - }, - "notBefore" : 0, - "groups" : [ ] }, { "id" : "3d45bb85-0a2d-4b15-8a19-d26a5619d359", "createdTimestamp" : 1674148694810, @@ -2674,7 +2658,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2692,7 +2676,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2782,7 +2766,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "feafc299-fede-4880-9e23-eb81aca22808", + "id" : "8facbab5-bca2-42c6-8608-ed94dacefe92", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2804,7 +2788,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ce7904d0-9182-49a2-aa71-a7b43e21f3ac", + "id" : "be52bd38-2def-41e7-a021-69bae78e92b7", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2833,7 +2817,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d9c6909a-5cc1-4ddf-b297-dbfcf6e609a6", + "id" : "ee18f6d1-9ca3-4535-a7a0-9759f3841513", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2855,7 +2839,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"083a589e-a486-42b6-ae73-1ec983967ff5", + "id" : "c76481eb-7997-4231-abac-632afd97631f", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2877,7 +2861,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7f0248b0-2d51-4175-9fd2-52b606a39e26", + "id" : "14fe94d2-f3ef-4349-9cbe-79921c013108", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2899,7 +2883,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "44465f1f-c700-4ec0-a234-d95c994c9e25", + "id" : "533c45e3-10d9-480b-9c9b-c2f746fb6f66", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2921,7 +2905,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8cf09055-5b98-4fc8-b867-3dffacdec21b", + "id" : "1161d043-26ba-420c-baed-b220bcef40f1", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2943,7 +2927,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "16b50b3e-4240-4f49-a85e-1bfd40def300", + "id" : "cbba8afb-920f-4ae0-85f3-6bc520485dc2", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2966,7 +2950,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2aa981ae-d67e-49fb-95a4-91de1e5ab724", + "id" : "7b349cd1-fb1c-4d04-b5b5-885352277562", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2988,7 +2972,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "cf8406f7-09c3-4614-a898-99c9d66746f6", + "id" : "de10b07d-98b5-483c-b193-b1b93229478f", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -3024,7 +3008,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e1ec7d6e-7612-4c5b-afce-c7f4fddbf6ec", + "id" : "4504d37b-3a2d-4cc9-b300-29482d86c72e", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -3060,7 +3044,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f5862b09-6e01-4c88-b44e-26dc59d71b80", + "id" : "9d86bdff-ba8e-433a-8536-a49c0af5faf2", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -3089,7 +3073,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7caa8611-8b13-437e-83b2-556899b5444f", + "id" : "546d31fc-a885-46eb-94bd-171d04f16a7c", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -3104,7 +3088,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "91d40deb-344f-4e0b-a845-98b2fc4a633a", + "id" : "70e5d629-4338-4aec-8671-fc7cf4c450b1", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -3127,7 +3111,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f221b5e6-1bcc-4b37-ba61-4d3bc6a30a8b", + "id" : "7213dc19-6e0b-4241-bef6-2409346a2745", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -3149,7 +3133,7 @@ "userSetupAllowed" 
: false } ] }, { - "id" : "3ed8e597-19af-4ec8-b532-a97311f52de3", + "id" : "f91a8499-8cf5-408c-b85d-40e85a3f6ee3", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -3171,7 +3155,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3970fd16-3786-4eb3-9efe-453d0984b18b", + "id" : "9ec3751c-619e-4edc-a14f-4ac9c60b056f", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -3187,7 +3171,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e26b27b4-c957-491c-bb6d-9d226b22399c", + "id" : "8048e711-8e77-4b85-8b26-243948a7c2f4", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -3223,7 +3207,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3ae37429-a623-42e3-a4a1-f9586b96b730", + "id" : "5a08de49-dd24-4e53-a656-9fac52fc6d2b", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -3259,7 +3243,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7606ecd5-eb13-4aee-bd9f-3ec4ce77c59c", + "id" : "42bc970f-3ee5-429c-a543-e8078808d371", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -3275,13 +3259,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "058b3c89-4ea4-43fa-b337-e523b1d93ec3", + "id" : "23f4f930-3290-4a63-ac96-f7ddc04fbce2", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "21410ac7-4b82-4f19-aae2-43ac33ba3f8f", + "id" : "4cfa7fa4-1a9b-4464-9510-460208e345eb", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index b6f685b8f..1b7166bb1 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -3,9 +3,11 @@ alex@sartography.com dan@sartography.com daniel@sartography.com elizabeth@sartography.com +j@sartography.com jason@sartography.com jon@sartography.com kb@sartography.com +kevin@sartography.com madhurya@sartography.com mike@sartography.com natalia@sartography.com From ff1ccdd60059c4f1a10c4b734458a8da98d7f5d6 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 13:11:39 -0500 Subject: [PATCH 30/59] add more users, and try to prevent sentry notification again --- .../realm_exports/spiffworkflow-realm.json | 244 ++++++++++++++++-- .../keycloak/test_user_lists/status | 8 + .../exceptions/api_error.py | 6 +- 3 files changed, 230 insertions(+), 28 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index c81e57ad6..722f12760 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -1083,6 +1083,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "e911fb0f-fd07-4886-acbf-d00930d293d3", + "createdTimestamp" : 1675447845512, + "username" : "legal.program-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.program-lead@status.im", + "credentials" : [ { + "id" : "9676d8d3-1e8c-4f5d-b5f7-49745cecf8fd", + "type" 
: "password", + "createdDate" : 1675447845577, + "secretData" : "{\"value\":\"vTffScfGXIjWWyDDfzo7JPiJe9VjAtrmds382EeV7N+wYNapJmLTVModkBsmGPy4TmWLc9BoysQynOaanSGi9Q==\",\"salt\":\"67ZxTEnar8aq4LZLhSNTFg==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "eff82d12-9a67-4002-b3c5-37811bd45199", "createdTimestamp" : 1675349217585, @@ -1103,6 +1123,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "4ed2b5a2-16c2-4029-ae97-d75c60f2147f", + "createdTimestamp" : 1675447845616, + "username" : "legal.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.project-lead@status.im", + "credentials" : [ { + "id" : "fd0b0d0a-8a3e-48c9-b17b-023e87057048", + "type" : "password", + "createdDate" : 1675447845652, + "secretData" : "{\"value\":\"l/DPfNBcHINV8lCf9nEyCJkFvaMGnLqcd1Y8t9taLqxb8r/ofY2ce79C19JCHDQJXRPRuCsMoobuFhhNR6aQmg==\",\"salt\":\"2ivCPrNc56396ldlwpQP6Q==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "8cd6feba-5ca6-4cfb-bc1a-a52c80595783", "createdTimestamp" : 1675349217698, @@ -1305,6 +1345,86 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "9f703c96-02f1-403c-b070-25feb86cfe21", + "createdTimestamp" : 1675447845811, + "username" : "ppg.ba.program-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba.program-lead@status.im", + "credentials" : [ { + "id" : "bf74118b-b28f-4d2f-8bfa-7b9d1a8345f2", + "type" : "password", + "createdDate" : 1675447845847, + "secretData" : "{\"value\":\"wFUAB6E98gE222nCfsKe6P3kSZxeOSjhflsxon8kw/dY4ZwN0KMwvlYuNhmoptTLqDQJyqUiydmlMK0NS4JjTQ==\",\"salt\":\"YCPk4Tc3eXcoes78oLhDEg==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "81a1727b-c846-4af9-8d95-1c50b1deb0d5", + "createdTimestamp" : 1675447845879, + "username" : "ppg.ba.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba.project-lead@status.im", + "credentials" : [ { + "id" : "6411830d-6015-4cf2-bac6-d49c26510319", + "type" : "password", + "createdDate" : 1675447845915, + "secretData" : "{\"value\":\"1+m8twycOEbA4X61zN7dLENqp2IxxQZrXKaf3mEuzmxouHrgxvmXudwC6DWyfjXvLm7gxWlaa4cofBFwr1idig==\",\"salt\":\"UEKUSScYv2xY+rJ8vlvF4A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "1d4d471a-b3ef-4750-97c4-a9e64eb8f414", + "createdTimestamp" : 1675447845942, + "username" : "ppg.ba.sme", + "enabled" : true, + "totp" : false, + 
"emailVerified" : false, + "email" : "ppg.ba.sme@status.im", + "credentials" : [ { + "id" : "6512f88a-cbcc-4d79-be17-1d132ba11e64", + "type" : "password", + "createdDate" : 1675447845977, + "secretData" : "{\"value\":\"EErx/3vG+lh4DgrJUzkBv4cLT3sK1gS+T9KD5V/JpvJUmJpRFQqpk+YxC/nC/kTGLIpRDdCIN690T84FlOIjew==\",\"salt\":\"FPeVGnFbt9TRNiORMB5LMQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "2dade29f-c6dc-445b-bdf0-eed316bdb638", + "createdTimestamp" : 1675447846003, + "username" : "ppg.ba.sme1", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba.sme1@status.im", + "credentials" : [ { + "id" : "ccf2d138-020a-4a29-b63d-1f4d2f415639", + "type" : "password", + "createdDate" : 1675447846038, + "secretData" : "{\"value\":\"BtSJtW/8lCtyrDPTXzhsyT/32H+pOHx9thKqJV30dOEZ9wcSQbrRSHoQbXwLos+sIiA82X3wm+qObdQoD5guVQ==\",\"salt\":\"nSbgxYpVGaMz2ArmqLCN6Q==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "c3ea06ee-c497-48e6-8816-43c8ef68bd8b", "createdTimestamp" : 1674148694747, @@ -1345,6 +1465,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "c21c075d-9ac5-40a1-964a-c1d6ffe17257", + "createdTimestamp" : 1675447845680, + "username" : "security.program-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security.program-lead@status.im", + "credentials" : [ { + "id" : "d1401dbd-a88b-44a6-b13c-fff13ee07e0c", + "type" : "password", + "createdDate" : 1675447845718, + "secretData" : "{\"value\":\"3D76RpIFG0/ixbSBeJfCc61kyL8PvVn/khA8FOy6RLg2hrZbs1Uwl8SmplnSUll1wD5a/BoobsO7v1XW4TCvwQ==\",\"salt\":\"YtDRRmBV4SBlO/oX23r2EQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "ace0432f-1818-4210-8bcf-15533abfb3ce", "createdTimestamp" : 1675349217958, @@ -1365,6 +1505,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "34dfacfd-24b5-414e-ac3e-9b013399aee2", + "createdTimestamp" : 1675447845747, + "username" : "security.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security.project-lead@status.im", + "credentials" : [ { + "id" : "cb5d8a8a-e7d0-40e4-878b-a33608cb76c8", + "type" : "password", + "createdDate" : 1675447845784, + "secretData" : "{\"value\":\"rudimVOjVwJeO/1RLuyHySEaSQMzjHqPQrh5Pmfr4L2PgP/1oDKLVB38pKOohlbTarDcbAfMHB7AFYAPn9kuIg==\",\"salt\":\"cOkkUBOx/4AVUSa3Ozsiuw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : 
"6272ac80-1d79-4e3c-a5c1-b31660560318", "createdTimestamp" : 1675349218020, @@ -1425,6 +1585,40 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "b768e3ef-f905-4493-976c-bc3408c04bec", + "createdTimestamp" : 1675447832524, + "username" : "service-account-spiffworkflow-backend", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "serviceAccountClientId" : "spiffworkflow-backend", + "credentials" : [ ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "clientRoles" : { + "spiffworkflow-backend" : [ "uma_protection" ] + }, + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "b6fb214b-cb8a-4403-9308-ac6d4e13ef26", + "createdTimestamp" : 1675447832560, + "username" : "service-account-withauth", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "serviceAccountClientId" : "withAuth", + "credentials" : [ ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "clientRoles" : { + "withAuth" : [ "uma_protection" ] + }, + "notBefore" : 0, + "groups" : [ ] }, { "id" : "3d45bb85-0a2d-4b15-8a19-d26a5619d359", "createdTimestamp" : 1674148694810, @@ -2658,7 +2852,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2676,7 +2870,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2766,7 +2960,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "8facbab5-bca2-42c6-8608-ed94dacefe92", + "id" : "cb39eda2-18c2-4b03-9d7c-672a2bd47d19", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2788,7 +2982,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "be52bd38-2def-41e7-a021-69bae78e92b7", + "id" : "96d4e28f-51ad-4737-87b4-5a10484ceb8b", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2817,7 +3011,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ee18f6d1-9ca3-4535-a7a0-9759f3841513", + "id" : "8f4c884d-93cd-4404-bc3a-1fa717b070c5", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : 
"basic-flow", @@ -2839,7 +3033,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "c76481eb-7997-4231-abac-632afd97631f", + "id" : "166d1879-dd61-4fb4-b4f6-0a4d69f49da8", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2861,7 +3055,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "14fe94d2-f3ef-4349-9cbe-79921c013108", + "id" : "18cab8f9-f010-4226-a86e-8da2f1632304", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2883,7 +3077,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "533c45e3-10d9-480b-9c9b-c2f746fb6f66", + "id" : "04d8d1d1-5253-4644-b55d-8c9317818b33", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2905,7 +3099,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1161d043-26ba-420c-baed-b220bcef40f1", + "id" : "2bf21e1d-ff7e-4d52-8be7-31355945c302", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2927,7 +3121,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "cbba8afb-920f-4ae0-85f3-6bc520485dc2", + "id" : "fa8636a5-9969-41a5-9fef-9c825cceb819", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2950,7 +3144,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7b349cd1-fb1c-4d04-b5b5-885352277562", + "id" : "8656a884-6645-40b5-b075-c40736e27811", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2972,7 +3166,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "de10b07d-98b5-483c-b193-b1b93229478f", + "id" : "0d88d334-bfa4-4cf1-9fa3-17d0df0151d1", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -3008,7 +3202,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4504d37b-3a2d-4cc9-b300-29482d86c72e", + "id" : "9b195d67-e3e6-4983-8607-533b739ebd97", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -3044,7 +3238,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "9d86bdff-ba8e-433a-8536-a49c0af5faf2", + "id" : "fd0273a1-f6f4-4df1-a057-54ac4e91f4a9", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -3073,7 +3267,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "546d31fc-a885-46eb-94bd-171d04f16a7c", + "id" : "b457cba8-ef31-473b-a481-c095b2f4eb48", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -3088,7 +3282,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "70e5d629-4338-4aec-8671-fc7cf4c450b1", + "id" : "97519504-fd69-4c08-bd27-15d26fbc9b76", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -3111,7 +3305,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7213dc19-6e0b-4241-bef6-2409346a2745", + "id" : "fc6a4468-1a78-410d-ac97-cf9f05814850", "alias" : "forms", "description" : "Username, password, otp and other 
auth forms.", "providerId" : "basic-flow", @@ -3133,7 +3327,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f91a8499-8cf5-408c-b85d-40e85a3f6ee3", + "id" : "97a25d8a-25a0-4bf4-be6d-a6f019cf3a32", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -3155,7 +3349,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "9ec3751c-619e-4edc-a14f-4ac9c60b056f", + "id" : "671e8ec7-af31-4c54-b6bb-96ebe69881de", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -3171,7 +3365,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8048e711-8e77-4b85-8b26-243948a7c2f4", + "id" : "24d6aaaa-5202-4401-99c3-bb15925bd5be", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -3207,7 +3401,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "5a08de49-dd24-4e53-a656-9fac52fc6d2b", + "id" : "f948bd43-ff05-4245-be30-a0a0dad2b7f0", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -3243,7 +3437,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "42bc970f-3ee5-429c-a543-e8078808d371", + "id" : "7e4aaea7-05ca-4aa0-b934-4c81614620a8", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -3259,13 +3453,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "23f4f930-3290-4a63-ac96-f7ddc04fbce2", + "id" : "14ca1058-25e7-41f6-85ce-ad0bfce2c67c", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "4cfa7fa4-1a9b-4464-9510-460208e345eb", + "id" : "16803de1-f7dc-4293-acde-fd0eae264377", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" @@ -3360,4 +3554,4 @@ "clientPolicies" : { "policies" : [ ] } -} +} \ No newline at end of file diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index cb5107478..66da936e9 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -22,17 +22,25 @@ lead1@status.im lead@status.im legal.lead@status.im legal.program-lead.sme@status.im +legal.program-lead@status.im legal.project-lead.sme@status.im +legal.project-lead@status.im legal.sme@status.im legal1.sme@status.im manuchehr@status.im peopleops.partner@status.im peopleops.talent@status.im +ppg.ba.program-lead@status.im +ppg.ba.project-lead@status.im +ppg.ba.sme1@status.im +ppg.ba.sme@status.im ppg.ba@status.im program.lead@status.im sasha@status.im security.program-lead.sme@status.im +security.program-lead@status.im security.project-lead.sme@status.im +security.project-lead@status.im security.sme@status.im security1.sme@status.im services.lead@status.im diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index 46d2ad549..886e138e0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -215,10 +215,10 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: # an event id or send out tags like username current_app.logger.exception(exception) else: - current_app.logger.error( + current_app.logger.warning( f"Received 
exception: {exception}. Since we do not want this particular" - " exception in sentry, we cannot use logger.exception, so there will be no" - " backtrace. see api_error.py" + " exception in sentry, we cannot use logger.exception or logger.error, so" + " there will be no backtrace. see api_error.py" ) error_code = "internal_server_error" From b3de01cad66ca8814387788d3902b3589153d639 Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 3 Feb 2023 13:25:05 -0500 Subject: [PATCH 31/59] Prevent double click on submit of forms. --- spiffworkflow-frontend/src/routes/TaskShow.tsx | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index 83e5df3f0..d68bd3da9 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -26,6 +26,7 @@ export default function TaskShow() { const [userTasks, setUserTasks] = useState(null); const params = useParams(); const navigate = useNavigate(); + const [disabled, setDisabled] = useState(false); const { addError, removeError } = useAPIError(); @@ -58,6 +59,7 @@ export default function TaskShow() { const processSubmitResult = (result: any) => { removeError(); + setDisabled(false); if (result.ok) { navigate(`/tasks`); } else if (result.process_instance_id) { @@ -68,13 +70,20 @@ export default function TaskShow() { }; const handleFormSubmit = (event: any) => { + if (disabled) { + return; + } + setDisabled(true); removeError(); const dataToSubmit = event.formData; delete dataToSubmit.isManualTask; HttpService.makeCallToBackend({ path: `/tasks/${params.process_instance_id}/${params.task_id}`, successCallback: processSubmitResult, - failureCallback: addError, + failureCallback: (error: any) => { + addError(error); + setDisabled(false); + }, httpMethod: 'PUT', postBody: dataToSubmit, }); From 0b9c82c68d1acff3e4324cc878ef291b40c013c9 Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 3 Feb 2023 13:58:38 -0500 Subject: [PATCH 32/59] run_pyl --- .../keycloak/realm_exports/spiffworkflow-realm.json | 2 +- .../services/process_instance_processor.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 722f12760..eab3bd968 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -3554,4 +3554,4 @@ "clientPolicies" : { "policies" : [ ] } -} \ No newline at end of file +} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 1524587fb..516011157 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -43,7 +43,7 @@ from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter # type: ignore from SpiffWorkflow.exceptions import SpiffWorkflowException # type: ignore -from SpiffWorkflow.exceptions import WorkflowException # type: ignore +from SpiffWorkflow.exceptions import WorkflowException from SpiffWorkflow.exceptions 
import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore @@ -604,7 +604,7 @@ class ProcessInstanceProcessor: ) ) except Exception as err: - raise (err) + raise err finally: spiff_logger.setLevel(original_spiff_logger_log_level) @@ -633,7 +633,7 @@ class ProcessInstanceProcessor: ) -> None: """Raise_if_no_potential_owners.""" if not potential_owner_ids: - raise (NoPotentialOwnersForTaskError(message)) + raise NoPotentialOwnersForTaskError(message) def get_potential_owner_ids_from_task( self, task: SpiffTask From c3cb2888750f9b77b0bd0380a0b3265fa0efc14f Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 15:40:14 -0500 Subject: [PATCH 33/59] make form schema and form ui schema both dicts, add support for hiding fields based on task data --- .../realm_exports/spiffworkflow-realm.json | 2 +- .../src/spiffworkflow_backend/models/task.py | 4 +- .../routes/tasks_controller.py | 57 ++++++++++++------- .../color_question.json | 4 ++ .../dynamic_enums_ask_for_color.bpmn | 4 +- .../integration/test_process_api.py | 3 + .../src/routes/TaskShow.tsx | 2 +- 7 files changed, 51 insertions(+), 25 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 722f12760..eab3bd968 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -3554,4 +3554,4 @@ "clientPolicies" : { "policies" : [ ] } -} \ No newline at end of file +} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 5c924196a..e1851773e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -115,8 +115,8 @@ class Task: process_model_display_name: Union[str, None] = None, process_group_identifier: Union[str, None] = None, process_model_identifier: Union[str, None] = None, - form_schema: Union[str, None] = None, - form_ui_schema: Union[str, None] = None, + form_schema: Union[dict, None] = None, + form_ui_schema: Union[dict, None] = None, parent: Optional[str] = None, event_definition: Union[dict[str, Any], None] = None, call_activity_process_identifier: Optional[str] = None, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 2879c1207..086f7a45b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -253,31 +253,16 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response ) ) - form_contents = _prepare_form_data( + form_dict = _prepare_form_data( form_schema_file_name, spiff_task, process_model_with_form, ) - try: - # form_contents is a str - form_dict = json.loads(form_contents) - except Exception as exception: - raise ( - ApiError( - error_code="error_loading_form", - message=( - f"Could not load form schema from: {form_schema_file_name}." 
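#   (illustrative aside, not lines from this patch) The json.loads that used
#   to live here moves into _prepare_form_data, so task_show only ever
#   receives a parsed dict, and a malformed schema file still surfaces as the
#   same ApiError. The new call-site contract, roughly:
#
#     form_dict = _prepare_form_data(form_schema_file_name, spiff_task, process_model_with_form)
#     assert isinstance(form_dict, dict)  # parse failures raise "error_loading_form" instead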
- f" Error was: {str(exception)}" - ), - status_code=400, - ) - ) from exception - if task.data: _update_form_schema_with_task_data_as_needed(form_dict, task) - if form_contents: + if form_dict: task.form_schema = form_dict if form_ui_schema_file_name: @@ -289,6 +274,23 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response if ui_form_contents: task.form_ui_schema = ui_form_contents + if task.form_ui_schema is None: + task.form_ui_schema = {} + + if task.data and "form_ui_hidden_fields" in task.data: + hidden_fields = task.data["form_ui_hidden_fields"] + for hidden_field in hidden_fields: + hidden_field_parts = hidden_field.split(".") + relevant_depth_of_ui_schema = task.form_ui_schema + for ii, hidden_field_part in enumerate(hidden_field_parts): + if hidden_field_part not in relevant_depth_of_ui_schema: + relevant_depth_of_ui_schema[hidden_field_part] = {} + relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[ + hidden_field_part + ] + if len(hidden_field_parts) == ii + 1: + relevant_depth_of_ui_schema["ui:widget"] = "hidden" + if task.properties and task.data and "instructionsForEndUser" in task.properties: if task.properties["instructionsForEndUser"]: try: @@ -525,14 +527,29 @@ def _get_tasks( def _prepare_form_data( form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo -) -> str: +) -> dict: """Prepare_form_data.""" if spiff_task.data is None: - return "" + return {} file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") try: - return _render_jinja_template(file_contents, spiff_task) + form_contents = _render_jinja_template(file_contents, spiff_task) + try: + # form_contents is a str + hot_dict: dict = json.loads(form_contents) + return hot_dict + except Exception as exception: + raise ( + ApiError( + error_code="error_loading_form", + message=( + f"Could not load form schema from: {form_file}." 
+ f" Error was: {str(exception)}" + ), + status_code=400, + ) + ) from exception except WorkflowTaskException as wfe: wfe.add_note(f"Error in Json Form File '{form_file}'") api_error = ApiError.from_workflow_exception( diff --git a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json index 20ea1c124..a3528138c 100644 --- a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json +++ b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json @@ -13,6 +13,10 @@ "selectedColor": { "$ref": "#/definitions/Color", "title": "Select color" + }, + "veryImportantFieldButOnlySometimes": { + "title": "Very important field", + "type": "string" } } } diff --git a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn index 7d21851bc..7ec50272f 100644 --- a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn +++ b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn @@ -13,7 +13,9 @@ Flow_1my9ag5 Flow_0b04rbg - awesome_color_options = [{"value": "blue", "label": "Blue"}, {"value": "green", "label": "Green"}] + awesome_color_options = [{"value": "blue", "label": "Blue"}, {"value": "green", "label": "Green"}] +form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes"] + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index f52cbc43f..91246c317 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1686,6 +1686,9 @@ class TestProcessApi(BaseTest): response.json["form_schema"]["definitions"]["Color"]["anyOf"][1]["title"] == "Green" ) + assert response.json["form_ui_schema"] == { + "veryImportantFieldButOnlySometimes": {"ui:widget": "hidden"} + } def test_process_instance_list_with_default_list( self, diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index 83e5df3f0..fc50df7ee 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -189,7 +189,7 @@ export default function TaskShow() { }, }; } else if (task.form_ui_schema) { - formUiSchema = JSON.parse(task.form_ui_schema); + formUiSchema = task.form_ui_schema; } if (task.state !== 'READY') { formUiSchema = Object.assign(formUiSchema || {}, { From 408759d12201b636f30722c638c832c42b4e5c36 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 15:47:35 -0500 Subject: [PATCH 34/59] show that hiding nested fields works as well --- .../data/dynamic_enum_select_fields/color_question.json | 8 ++++++++ .../dynamic_enums_ask_for_color.bpmn | 2 +- .../spiffworkflow_backend/integration/test_process_api.py | 7 ++++++- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json index a3528138c..1ce7072b8 100644 --- a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json +++ 
b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/color_question.json @@ -17,6 +17,14 @@ "veryImportantFieldButOnlySometimes": { "title": "Very important field", "type": "string" + }, + "building": { + "properties": { + "floor": { + "title": "Floor", + "type": "number" + } + } } } } diff --git a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn index 7ec50272f..d4f1aa5d2 100644 --- a/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn +++ b/spiffworkflow-backend/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn @@ -14,7 +14,7 @@ Flow_1my9ag5 Flow_0b04rbg awesome_color_options = [{"value": "blue", "label": "Blue"}, {"value": "green", "label": "Green"}] -form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes"] +form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes", "building.floor"] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 91246c317..c8fd5f6a3 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1686,8 +1686,13 @@ class TestProcessApi(BaseTest): response.json["form_schema"]["definitions"]["Color"]["anyOf"][1]["title"] == "Green" ) + + # if you set this in task data: + # form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes", "building.floor"] + # you will get this ui schema: assert response.json["form_ui_schema"] == { - "veryImportantFieldButOnlySometimes": {"ui:widget": "hidden"} + "building": {"floor": {"ui:widget": "hidden"}}, + "veryImportantFieldButOnlySometimes": {"ui:widget": "hidden"}, } def test_process_instance_list_with_default_list( From 53d99c49d193aa66b46146a532858d9b163a0515 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 16:17:36 -0500 Subject: [PATCH 35/59] refactor some stuff in task_show to separate functions --- .../routes/tasks_controller.py | 99 ++++++++++--------- 1 file changed, 55 insertions(+), 44 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 086f7a45b..72541ceb4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -170,6 +170,25 @@ def task_list_for_my_groups( ) +def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None: + if task.form_ui_schema is None: + task.form_ui_schema = {} + + if task.data and "form_ui_hidden_fields" in task.data: + hidden_fields = task.data["form_ui_hidden_fields"] + for hidden_field in hidden_fields: + hidden_field_parts = hidden_field.split(".") + relevant_depth_of_ui_schema = task.form_ui_schema + for ii, hidden_field_part in enumerate(hidden_field_parts): + if hidden_field_part not in relevant_depth_of_ui_schema: + relevant_depth_of_ui_schema[hidden_field_part] = {} + relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[ + hidden_field_part + ] + if len(hidden_field_parts) == ii + 1: + relevant_depth_of_ui_schema["ui:widget"] = "hidden" + + def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: """Task_show.""" 
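# A trimmed, dependency-free sketch of the
# _munge_form_ui_schema_based_on_hidden_fields_in_task_data helper above,
# operating on plain dicts instead of a Task object (an assumed
# simplification, convenient for unit-testing the nesting logic):

def munge_ui_schema_sketch(form_ui_schema: dict, hidden_fields: list) -> dict:
    for hidden_field in hidden_fields:
        hidden_field_parts = hidden_field.split(".")
        relevant_depth = form_ui_schema
        for ii, hidden_field_part in enumerate(hidden_field_parts):
            # create the nested level on first visit, then descend into it
            relevant_depth = relevant_depth.setdefault(hidden_field_part, {})
            if len(hidden_field_parts) == ii + 1:
                relevant_depth["ui:widget"] = "hidden"
    return form_ui_schema

# munge_ui_schema_sketch({}, ["veryImportantFieldButOnlySometimes", "building.floor"])
# == {"veryImportantFieldButOnlySometimes": {"ui:widget": "hidden"},
#     "building": {"floor": {"ui:widget": "hidden"}}},
# matching the assertion in test_process_api.py above.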
process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -185,20 +204,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response process_instance.process_model_identifier, ) - human_task = HumanTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id - ).first() - if human_task is None: - raise ( - ApiError( - error_code="no_human_task", - message=( - f"Cannot find a task to complete for task id '{task_id}' and" - f" process instance {process_instance_id}." - ), - status_code=500, - ) - ) + _find_human_task_or_raise(process_instance_id, task_id) form_schema_file_name = "" form_ui_schema_file_name = "" @@ -274,22 +280,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response if ui_form_contents: task.form_ui_schema = ui_form_contents - if task.form_ui_schema is None: - task.form_ui_schema = {} - - if task.data and "form_ui_hidden_fields" in task.data: - hidden_fields = task.data["form_ui_hidden_fields"] - for hidden_field in hidden_fields: - hidden_field_parts = hidden_field.split(".") - relevant_depth_of_ui_schema = task.form_ui_schema - for ii, hidden_field_part in enumerate(hidden_field_parts): - if hidden_field_part not in relevant_depth_of_ui_schema: - relevant_depth_of_ui_schema[hidden_field_part] = {} - relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[ - hidden_field_part - ] - if len(hidden_field_parts) == ii + 1: - relevant_depth_of_ui_schema["ui:widget"] = "hidden" + _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task) if task.properties and task.data and "instructionsForEndUser" in task.properties: if task.properties["instructionsForEndUser"]: @@ -367,20 +358,11 @@ def task_submit_shared( if terminate_loop and spiff_task.is_looping(): spiff_task.terminate_loop() - human_task = HumanTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id, completed=False - ).first() - if human_task is None: - raise ( - ApiError( - error_code="no_human_task", - message=( - f"Cannot find a task to complete for task id '{task_id}' and" - f" process instance {process_instance_id}." - ), - status_code=500, - ) - ) + human_task = _find_human_task_or_raise( + process_instance_id=process_instance_id, + task_id=task_id, + only_tasks_that_can_be_completed=True, + ) with sentry_sdk.start_span(op="task", description="complete_form_task"): processor.lock_process_instance("Web") @@ -685,3 +667,32 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any: ).label("potential_owner_usernames") return potential_owner_usernames_from_group_concat_or_similar + + +def _find_human_task_or_raise( + process_instance_id: int, + task_id: str, + only_tasks_that_can_be_completed: bool = False, +) -> HumanTaskModel: + if only_tasks_that_can_be_completed: + human_task_query = HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id, completed=False + ) + else: + human_task_query = HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id + ) + + human_task: HumanTaskModel = human_task_query.first() + if human_task is None: + raise ( + ApiError( + error_code="no_human_task", + message=( + f"Cannot find a task to complete for task id '{task_id}' and" + f" process instance {process_instance_id}." 
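# (design note, inferred from the query above) only_tasks_that_can_be_completed=True
# adds completed=False to the filter, so the lookup in task_submit_shared,
#
#   _find_human_task_or_raise(process_instance_id=process_instance_id,
#                             task_id=task_id,
#                             only_tasks_that_can_be_completed=True)
#
# hits this "no_human_task" ApiError for an already-completed task rather than
# allowing a second submission.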
+ ), + status_code=500, + ) + ) + return human_task From d010c2bce6c75868ccd0b5dc50a6e451311290d4 Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 3 Feb 2023 17:01:03 -0500 Subject: [PATCH 36/59] Fix that dreadful unknown "KeyError" exception that was cropping up. Adding a bit of detail to the spiffworkflow exceptions when a duplicate process model is found. Disable the submit button on tasks after you click submit (avoid the double click and give users a better experience) --- .../SpiffWorkflow/bpmn/parser/BpmnParser.py | 6 +++--- .../exceptions/api_error.py | 4 ++++ .../routes/tasks_controller.py | 12 +++++++----- .../services/process_model_service.py | 5 ++++- spiffworkflow-frontend/src/routes/TaskShow.tsx | 17 +++++++++++++---- 5 files changed, 31 insertions(+), 13 deletions(-) diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py index 6b98bb8a9..7741c801c 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py @@ -48,7 +48,7 @@ from .task_parsers import ( GatewayParser, ConditionalGatewayParser, CallActivityParser, - ScriptTaskParser, + ScriptTaskParser, SubWorkflowParser, ) from .event_parsers import ( @@ -254,9 +254,9 @@ class BpmnParser(object): def create_parser(self, node, filename=None, lane=None): parser = self.PROCESS_PARSER_CLASS(self, node, self.namespaces, filename=filename, lane=lane) if parser.get_id() in self.process_parsers: - raise ValidationException('Duplicate process ID', node=node, file_name=filename) + raise ValidationException(f'Duplicate process ID: {parser.get_id()}', node=node, file_name=filename) if parser.get_name() in self.process_parsers_by_name: - raise ValidationException('Duplicate process name', node=node, file_name=filename) + raise ValidationException(f'Duplicate process name: {parser.get_name()}', node=node, file_name=filename) self.process_parsers[parser.get_id()] = parser self.process_parsers_by_name[parser.get_name()] = parser diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index 9fe08bec5..bb6d84f4f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -242,6 +242,10 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: api_exception = None if isinstance(exception, ApiError): api_exception = exception + elif isinstance(exception, SpiffWorkflowException): + api_exception = ApiError.from_workflow_exception( + "unexpected_workflow_exception", "Unexpected Workflow Error", exception + ) else: api_exception = ApiError( error_code=error_code, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 2879c1207..6f272287b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -275,7 +275,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response ) from exception if task.data: - _update_form_schema_with_task_data_as_needed(form_dict, task) + _update_form_schema_with_task_data_as_needed(form_dict, task, spiff_task) if form_contents: task.form_schema = form_dict @@ -588,7 +588,9 @@ def _get_spiff_task_from_process_instance( # 
originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches -def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task) -> None: +def _update_form_schema_with_task_data_as_needed( + in_dict: dict, task: Task, spiff_task: SpiffTask +) -> None: """Update_nested.""" if task.data is None: return None @@ -615,7 +617,7 @@ def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task) -> N f" '{task_data_var}' but it doesn't exist in" " the Task Data." ), - task=task, + task=spiff_task, ) raise ( ApiError.from_workflow_exception( @@ -648,11 +650,11 @@ def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task) -> N in_dict[k] = options_for_react_json_schema_form elif isinstance(value, dict): - _update_form_schema_with_task_data_as_needed(value, task) + _update_form_schema_with_task_data_as_needed(value, task, spiff_task) elif isinstance(value, list): for o in value: if isinstance(o, dict): - _update_form_schema_with_task_data_as_needed(o, task) + _update_form_schema_with_task_data_as_needed(o, task, spiff_task) def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py index d0c43fb24..d00ed0117 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py @@ -499,7 +499,10 @@ class ProcessModelService(FileSystemService): if name is None: raise ApiError( error_code="missing_name_of_process_model", - message="Missing name of process model. It should be given", + message=( + "Missing name of process model. Path not found:" + f" {json_file_path}" + ), ) process_model_info = ProcessModelInfo( diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index d68bd3da9..a20e7b7b5 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -41,7 +41,10 @@ export default function TaskShow() { // instead of passing the process model identifier in through the params HttpService.makeCallToBackend({ path: url, - successCallback: setUserTasks, + successCallback: (tasks: any) => { + setDisabled(false); + setUserTasks(tasks); + }, onUnauthorized: () => {}, failureCallback: (error: any) => { addError(error); @@ -59,7 +62,6 @@ export default function TaskShow() { const processSubmitResult = (result: any) => { removeError(); - setDisabled(false); if (result.ok) { navigate(`/tasks`); } else if (result.process_instance_id) { @@ -212,10 +214,16 @@ export default function TaskShow() { reactFragmentToHideSubmitButton =
<div />;
   }
 
-  if (task.type === 'Manual Task' && task.state === 'READY') {
+  if (task.state === 'READY') {
+    let buttonText = 'Submit';
+    if (task.type === 'Manual Task') {
+      buttonText = 'Continue';
+    }
     reactFragmentToHideSubmitButton = (
       <div>
-        <Button type="submit">Continue</Button>
+        <Button type="submit" disabled={disabled}>
+          {buttonText}
+        </Button>
       </div>
); } @@ -228,6 +236,7 @@ export default function TaskShow() {
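// A minimal, self-contained sketch of the double-submit guard that the two
// TaskShow.tsx patches above and below converge on: disable the submit
// control when the user clicks, and re-enable it only once the backend has
// responded, so a second click cannot fire a duplicate request. Component,
// prop, and handler names here are illustrative assumptions, not the
// project's actual code.
import React, { useState } from 'react';

type SubmitFn = (data: Record<string, unknown>) => Promise<{ ok: boolean }>;

export function GuardedForm({ submitToBackend }: { submitToBackend: SubmitFn }) {
  const [disabled, setDisabled] = useState(false);

  const handleSubmit = async (event: React.FormEvent) => {
    event.preventDefault();
    if (disabled) {
      return; // swallow clicks that race ahead of the state update
    }
    setDisabled(true);
    try {
      await submitToBackend({});
    } finally {
      // Re-enable on success and failure alike; clearing the flag in only
      // some callbacks is how a form can end up permanently locked.
      setDisabled(false);
    }
  };

  return (
    <form onSubmit={handleSubmit}>
      <button type="submit" disabled={disabled}>
        Submit
      </button>
    </form>
  );
}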
Date: Fri, 3 Feb 2023 17:31:14 -0500 Subject: [PATCH 37/59] put setDisabled back in the awkward place since i was seeing the subsequent form stay disabled --- spiffworkflow-frontend/src/routes/TaskShow.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index 48d8e9bbb..93100a288 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -42,7 +42,6 @@ export default function TaskShow() { HttpService.makeCallToBackend({ path: url, successCallback: (tasks: any) => { - setDisabled(false); setUserTasks(tasks); }, onUnauthorized: () => {}, @@ -62,6 +61,7 @@ export default function TaskShow() { const processSubmitResult = (result: any) => { removeError(); + setDisabled(false); if (result.ok) { navigate(`/tasks`); } else if (result.process_instance_id) { From a9f00ce1a7ed2c7b2c4c556e523fcda160c4290f Mon Sep 17 00:00:00 2001 From: burnettk Date: Sat, 4 Feb 2023 00:03:32 -0500 Subject: [PATCH 38/59] if there are tenant specific fields in the config, transfer them from openid token to db --- .../migrations/versions/ca9b79dde5cc_.py | 32 +++++++++++++ .../spiffworkflow_backend/config/__init__.py | 14 ++++++ .../spiffworkflow_backend/config/default.py | 7 ++- .../src/spiffworkflow_backend/models/user.py | 3 ++ .../services/authorization_service.py | 46 +++++++++++-------- 5 files changed, 81 insertions(+), 21 deletions(-) create mode 100644 spiffworkflow-backend/migrations/versions/ca9b79dde5cc_.py diff --git a/spiffworkflow-backend/migrations/versions/ca9b79dde5cc_.py b/spiffworkflow-backend/migrations/versions/ca9b79dde5cc_.py new file mode 100644 index 000000000..8a7134f41 --- /dev/null +++ b/spiffworkflow-backend/migrations/versions/ca9b79dde5cc_.py @@ -0,0 +1,32 @@ +"""empty message + +Revision ID: ca9b79dde5cc +Revises: 2ec4222f0012 +Create Date: 2023-02-03 21:06:56.396816 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ca9b79dde5cc' +down_revision = '2ec4222f0012' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('user', sa.Column('tenant_specific_field_1', sa.String(length=255), nullable=True)) + op.add_column('user', sa.Column('tenant_specific_field_2', sa.String(length=255), nullable=True)) + op.add_column('user', sa.Column('tenant_specific_field_3', sa.String(length=255), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('user', 'tenant_specific_field_3') + op.drop_column('user', 'tenant_specific_field_2') + op.drop_column('user', 'tenant_specific_field_1') + # ### end Alembic commands ### diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py index 64c7e2c1a..24fc452de 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py @@ -51,6 +51,19 @@ def load_config_file(app: Flask, env_config_module: str) -> None: ) from exception +def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None: + tenant_specific_fields = app.config.get("TENANT_SPECIFIC_FIELDS") + + if tenant_specific_fields is None or tenant_specific_fields == "": + app.config["TENANT_SPECIFIC_FIELDS"] = [] + else: + app.config["TENANT_SPECIFIC_FIELDS"] = tenant_specific_fields.split(",") + if len(app.config["TENANT_SPECIFIC_FIELDS"]) > 3: + raise ConfigurationError( + "TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields" + ) + + def setup_config(app: Flask) -> None: """Setup_config.""" # ensure the instance folder exists @@ -108,3 +121,4 @@ def setup_config(app: Flask) -> None: thread_local_data = threading.local() app.config["THREAD_LOCAL_DATA"] = thread_local_data + _set_up_tenant_specific_fields_as_list_of_strings(app) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 4f0a82966..52126b1b5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -72,7 +72,7 @@ GIT_SSH_PRIVATE_KEY = environ.get("GIT_SSH_PRIVATE_KEY") GIT_USERNAME = environ.get("GIT_USERNAME") GIT_USER_EMAIL = environ.get("GIT_USER_EMAIL") -# Datbase Configuration +# Database Configuration SPIFF_DATABASE_TYPE = environ.get( "SPIFF_DATABASE_TYPE", default="mysql" ) # can also be sqlite, postgres @@ -88,3 +88,8 @@ SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get( ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int( environ.get("ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600") ) + +# Tenant specific fields is a comma separated list of field names that we will convert to list of strings +# and store in the user table's tenant_specific_field_n columns. You can have up to three items in this +# comma-separated list. 
+TENANT_SPECIFIC_FIELDS = environ.get("TENANT_SPECIFIC_FIELDS") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index 7f8c88da9..3ecb8b284 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -34,6 +34,9 @@ class UserModel(SpiffworkflowBaseDBModel): service_id = db.Column(db.String(255), nullable=False, unique=False) display_name = db.Column(db.String(255)) email = db.Column(db.String(255)) + tenant_specific_field_1 = db.Column(db.String(255)) + tenant_specific_field_2 = db.Column(db.String(255)) + tenant_specific_field_3 = db.Column(db.String(255)) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index a72effd46..9134a4bad 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -484,38 +484,44 @@ class AuthorizationService: .filter(UserModel.service_id == user_info["sub"]) .first() ) - email = display_name = username = "" + user_attributes = {} + if "email" in user_info: - username = user_info["email"] - email = user_info["email"] + user_attributes["username"] = user_info["email"] + user_attributes["email"] = user_info["email"] else: # we fall back to the sub, which may be very ugly. - username = user_info["sub"] + "@" + user_info["iss"] + fallback_username = user_info["sub"] + "@" + user_info["iss"] + user_attributes["username"] = fallback_username if "preferred_username" in user_info: - display_name = user_info["preferred_username"] + user_attributes["display_name"] = user_info[ + "preferred_username" + ] elif "nickname" in user_info: - display_name = user_info["nickname"] + user_attributes["display_name"] = user_info["nickname"] elif "name" in user_info: - display_name = user_info["name"] + user_attributes["display_name"] = user_info["name"] + + user_attributes["service"] = user_info["iss"] + user_attributes["service_id"] = user_info["sub"] + + for field_index, tenant_specific_field in enumerate( + current_app.config["TENANT_SPECIFIC_FIELDS"] + ): + if tenant_specific_field in user_info: + field_number = field_index + 1 + user_attributes[ + f"tenant_specific_field_{field_number}" + ] = user_info[tenant_specific_field] if user_model is None: current_app.logger.debug("create_user in login_return") is_new_user = True - user_model = UserService().create_user( - username=username, - service=user_info["iss"], - service_id=user_info["sub"], - email=email, - display_name=display_name, - ) - + user_model = UserService().create_user(**user_attributes) else: # Update with the latest information - user_model.username = username - user_model.email = email - user_model.display_name = display_name - user_model.service = user_info["iss"] - user_model.service_id = user_info["sub"] + for key, value in user_attributes.items(): + setattr(user_model, key, value) # this may eventually get too slow. 
# when it does, be careful about backgrounding, because From 60ab826e1b914be3b7a957e2e1c633b6903b8c85 Mon Sep 17 00:00:00 2001 From: burnettk Date: Sat, 4 Feb 2023 00:09:43 -0500 Subject: [PATCH 39/59] lint --- .../spiffworkflow_backend/scripts/get_current_user.py | 4 +--- .../services/authorization_service.py | 10 ++++------ 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py index 66d21a4ca..e4b524c2a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py @@ -1,4 +1,4 @@ -"""Get_env.""" +"""Get current user.""" from typing import Any from flask import g @@ -10,8 +10,6 @@ from spiffworkflow_backend.scripts.script import Script class GetCurrentUser(Script): - """GetCurrentUser.""" - @staticmethod def requires_privileged_permissions() -> bool: """We have deemed this function safe to run without elevated permissions.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 9134a4bad..ed0eed270 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -494,9 +494,7 @@ class AuthorizationService: user_attributes["username"] = fallback_username if "preferred_username" in user_info: - user_attributes["display_name"] = user_info[ - "preferred_username" - ] + user_attributes["display_name"] = user_info["preferred_username"] elif "nickname" in user_info: user_attributes["display_name"] = user_info["nickname"] elif "name" in user_info: @@ -510,9 +508,9 @@ class AuthorizationService: ): if tenant_specific_field in user_info: field_number = field_index + 1 - user_attributes[ - f"tenant_specific_field_{field_number}" - ] = user_info[tenant_specific_field] + user_attributes[f"tenant_specific_field_{field_number}"] = user_info[ + tenant_specific_field + ] if user_model is None: current_app.logger.debug("create_user in login_return") From 32007c3cbbaf8dc816ee841e48c1e83ede260a75 Mon Sep 17 00:00:00 2001 From: burnettk Date: Sat, 4 Feb 2023 00:12:01 -0500 Subject: [PATCH 40/59] grab bamboo_id from keycloak --- .../realm_exports/spiffworkflow-realm.json | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index eab3bd968..bbe6ecdac 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -1005,6 +1005,9 @@ "totp" : false, "emailVerified" : false, "email" : "kb@sartography.com", + "attributes" : { + "bamboo_id" : [ "42" ] + }, "credentials" : [ { "id" : "2c0be363-038f-48f1-86d6-91fdd28657cf", "type" : "password", @@ -2035,6 +2038,21 @@ "claim.name" : "clientId", "jsonType.label" : "String" } + }, { + "id" : "a7692d41-b905-4049-9004-f6bea690051d", + "name" : "bamboo_id", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "aggregate.attrs" : "false", + "userinfo.token.claim" : "true", + "multivalued" : "false", + 
"user.attribute" : "bamboo_id", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "bamboo_id" + } } ], "defaultClientScopes" : [ "web-origins", "acr", "profile", "roles", "email" ], "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ], @@ -3554,4 +3572,4 @@ "clientPolicies" : { "policies" : [ ] } -} +} \ No newline at end of file From 84008cb6c12e9013d6f941f99bd2beaa21a3328e Mon Sep 17 00:00:00 2001 From: burnettk Date: Sat, 4 Feb 2023 00:36:17 -0500 Subject: [PATCH 41/59] hoping to fix tests on windows --- .../keycloak/realm_exports/spiffworkflow-realm.json | 2 +- .../src/spiffworkflow_backend/__init__.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index bbe6ecdac..0642321a9 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -3572,4 +3572,4 @@ "clientPolicies" : { "policies" : [ ] } -} \ No newline at end of file +} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 341cfac8d..420f990c0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -1,5 +1,6 @@ """__init__.""" import os +import sys from typing import Any import connexion # type: ignore @@ -203,6 +204,9 @@ def configure_sentry(app: flask.app.Flask) -> None: if sentry_traces_sample_rate is None: raise Exception("SENTRY_TRACES_SAMPLE_RATE is not set somehow") + # profiling doesn't work on windows, because of an issue like https://github.com/nvdv/vprof/issues/62 + profiles_sample_rate = 0 if sys.platform.startswith("win") else 1 + sentry_sdk.init( dsn=app.config.get("SENTRY_DSN"), integrations=[ @@ -218,8 +222,6 @@ def configure_sentry(app: flask.app.Flask) -> None: traces_sample_rate=float(sentry_traces_sample_rate), traces_sampler=traces_sampler, # The profiles_sample_rate setting is relative to the traces_sample_rate setting. - _experiments={ - "profiles_sample_rate": 1, - }, + _experiments={"profiles_sample_rate": profiles_sample_rate}, before_send=before_send, ) From 3b5763e8d017410ce4616347dd8ef8e55ed66ef4 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Sat, 4 Feb 2023 12:55:48 -0500 Subject: [PATCH 42/59] Back to inserting every log --- .../src/spiffworkflow_backend/services/logging_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 9981e1eb8..aba695b85 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -243,5 +243,5 @@ class DBHandler(logging.Handler): # so at some point we are going to insert logs. # we don't want to insert on every log, so we will insert every 100 logs, which is just about as fast as inserting # on every 1,000 logs. if we get deadlocks in the database, this can be changed to 1 in order to insert on every log. 
-        if len(self.logs) % 100 == 0:
+        if len(self.logs) % 1 == 0:
             self.bulk_insert_logs()

From 0b3f67abae7c6c3f9a9412e5417c876ac736655c Mon Sep 17 00:00:00 2001
From: Dan
Date: Mon, 6 Feb 2023 08:01:37 -0500
Subject: [PATCH 43/59] When searching for a human task to determine if the
 current user can complete it, filter on the "completed" flag. Front-end --
 enable the form if you receive an onUnauthorized error because the thing you
 are unauthorized to do might have nothing to do with whether you can submit
 the form.

---
 .../spiffworkflow_backend/services/authorization_service.py | 1 +
 spiffworkflow-frontend/src/routes/TaskShow.tsx               | 6 ++++--
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
index a72effd46..db3c3cced 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
@@ -457,6 +457,7 @@ class AuthorizationService:
         human_task = HumanTaskModel.query.filter_by(
             task_name=spiff_task.task_spec.name,
             process_instance_id=process_instance_id,
+            completed=False
         ).first()
         if human_task is None:
             raise HumanTaskNotFoundError(
diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx
index 93100a288..948bafb73 100644
--- a/spiffworkflow-frontend/src/routes/TaskShow.tsx
+++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx
@@ -42,9 +42,12 @@ export default function TaskShow() {
     HttpService.makeCallToBackend({
       path: url,
       successCallback: (tasks: any) => {
+        setDisabled(false);
         setUserTasks(tasks);
       },
-      onUnauthorized: () => {},
+      onUnauthorized: () => {
+        setDisabled(false);
+      },
       failureCallback: (error: any) => {
         addError(error);
       },
@@ -61,7 +64,6 @@ export default function TaskShow() {
 
   const processSubmitResult = (result: any) => {
     removeError();
-    setDisabled(false);
     if (result.ok) {
       navigate(`/tasks`);
     } else if (result.process_instance_id) {

From c7438253c74e114dce821818a206b24d0a0de840 Mon Sep 17 00:00:00 2001
From: Dan
Date: Mon, 6 Feb 2023 08:05:33 -0500
Subject: [PATCH 44/59] run_pyl

---
 .../src/spiffworkflow_backend/services/authorization_service.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
index db3c3cced..2491d44f4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py
@@ -457,7 +457,7 @@ class AuthorizationService:
         human_task = HumanTaskModel.query.filter_by(
             task_name=spiff_task.task_spec.name,
             process_instance_id=process_instance_id,
-            completed=False
+            completed=False,
         ).first()
         if human_task is None:
             raise HumanTaskNotFoundError(

From 8786f33cd28803a287880b096862c9c1a9171ee8 Mon Sep 17 00:00:00 2001
From: Dan
Date: Mon, 6 Feb 2023 08:35:44 -0500
Subject: [PATCH 45/59] Fix docker compose file.
--- docker-compose.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 138c3ff84..3cf2efdeb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,10 +13,9 @@ services: - "${SPIFF_FRONTEND_PORT:-8001}:${SPIFF_FRONTEND_PORT:-8001}/tcp" spiffworkflow-backend: -# container_name: spiffworkflow-backend + container_name: spiffworkflow-backend build: ./spiffworkflow-backend/. -# dockerfile: Dockerfile -# image: ghcr.io/sartography/spiffworkflow-backend:latest + image: ghcr.io/sartography/spiffworkflow-backend:latest depends_on: spiffworkflow-db: condition: service_healthy From ab9614c6b483f62e1c45d0863268128377f36d42 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 6 Feb 2023 10:03:51 -0500 Subject: [PATCH 46/59] add test users --- .../realm_exports/spiffworkflow-realm.json | 130 ++++++++++++++---- .../keycloak/test_user_lists/status | 4 + 2 files changed, 109 insertions(+), 25 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index eab3bd968..b4e2e08a5 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -1325,6 +1325,86 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "3bfb62f7-527d-4df5-94d0-6cdc23353fa3", + "createdTimestamp" : 1675695752975, + "username" : "peopleops.talent.program-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "peopleops.talent.program-lead@status.im", + "credentials" : [ { + "id" : "624b34ec-9a8a-45cd-bf50-6fe24a125b4e", + "type" : "password", + "createdDate" : 1675695753041, + "secretData" : "{\"value\":\"K/8rrCMCBlq+PzZudTFBBjIXPLOs35f4aW9cLSH4XLlTgS/IGkMv1EMPXwkSHJayxxF5TdwDOkLB6a7QDR3nvA==\",\"salt\":\"KZonqKccY/OcmZktAPXzLw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "cfadd1f9-eb8f-4b0a-ae04-4c8b98b5244a", + "createdTimestamp" : 1675695753095, + "username" : "peopleops.talent.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "peopleops.talent.project-lead@status.im", + "credentials" : [ { + "id" : "c64e4b50-7535-4ed4-941a-e474093c9ed1", + "type" : "password", + "createdDate" : 1675695753133, + "secretData" : "{\"value\":\"OIPhql7gjZGNV0AW3EVzo9VbdrK6+7n9hMqo0BXi4nUU1U3ljWS+/gmP3WbrRHi7tZme0ytrATi8KvY2dCKZKg==\",\"salt\":\"r3Ti57CEWUTKvp6Tr5ApEQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "90697442-0ceb-452b-8d6c-d3be528f1b54", + "createdTimestamp" : 1675695753161, + "username" : "peopleops.talent.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "peopleops.talent.sme@status.im", + "credentials" : [ { + "id" : "5b335757-d786-454e-941e-2c001a44fff6", + "type" : "password", + "createdDate" : 1675695753198, + "secretData" : 
"{\"value\":\"VRI6HxuZ+Oq/vi20d4UEQxxPQb4YyYpWhNtD7Q4CDmgyNnxsRvrbPYtvgaMHUZpHReCSXU4nYBNT1NHDi2KpYA==\",\"salt\":\"Rj1RljhwnjzqxTcLwVLbyg==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "0c0c2fa1-e043-4f50-8331-68d2df73e0c3", + "createdTimestamp" : 1675695753226, + "username" : "peopleops.talent1.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "peopleops.talent1.sme@status.im", + "credentials" : [ { + "id" : "548b5d7c-df97-462b-b7db-abc1a40a916e", + "type" : "password", + "createdDate" : 1675695753261, + "secretData" : "{\"value\":\"OX9q+pOP7BSVfZhlg6FeAsVCG+tYGuKPdFPGluuKxmdEHGgixJp8X6D4btxZb1HXOX8NR8hukf3npGeCKSqohQ==\",\"salt\":\"mUju+e0jzVc1nGktGz77iw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "c832f75b-7a0e-4d8a-8aee-f2e0f2aaf9d4", "createdTimestamp" : 1674743245003, @@ -2852,7 +2932,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-usermodel-property-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2870,7 +2950,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-address-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2960,7 +3040,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "cb39eda2-18c2-4b03-9d7c-672a2bd47d19", + "id" : "946724d3-fc95-4d8b-8e80-1b5441d16133", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2982,7 +3062,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "96d4e28f-51ad-4737-87b4-5a10484ceb8b", + "id" : "f1e5a918-3f15-4ff9-80fa-e1800a9ceb76", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -3011,7 +3091,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8f4c884d-93cd-4404-bc3a-1fa717b070c5", + "id" : "a91fda66-1614-4360-8741-6ece523feda5", "alias" : "Browser - Conditional OTP", 
"description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -3033,7 +3113,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "166d1879-dd61-4fb4-b4f6-0a4d69f49da8", + "id" : "38d95d5b-ba7e-4f69-acd6-fd9a5d9b252f", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -3055,7 +3135,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "18cab8f9-f010-4226-a86e-8da2f1632304", + "id" : "bba1cfc6-c391-47c4-b1f9-26178cc70b73", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -3077,7 +3157,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "04d8d1d1-5253-4644-b55d-8c9317818b33", + "id" : "9532380c-6a4f-4bde-8822-24d2125f2f9a", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -3099,7 +3179,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2bf21e1d-ff7e-4d52-8be7-31355945c302", + "id" : "f81bae40-7ac5-4641-8933-588c17a62754", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -3121,7 +3201,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "fa8636a5-9969-41a5-9fef-9c825cceb819", + "id" : "51dfe92b-25bc-4c00-b5e2-6678fb018398", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -3144,7 +3224,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8656a884-6645-40b5-b075-c40736e27811", + "id" : "0778fbd6-37d2-4eac-8ee9-a2bfdc081a48", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -3166,7 +3246,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0d88d334-bfa4-4cf1-9fa3-17d0df0151d1", + "id" : "5b5049d4-b785-451f-bd91-bd8ed97df297", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -3202,7 +3282,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "9b195d67-e3e6-4983-8607-533b739ebd97", + "id" : "921359fe-b30f-4f48-8565-9d745ee6216c", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -3238,7 +3318,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "fd0273a1-f6f4-4df1-a057-54ac4e91f4a9", + "id" : "1ae55b9d-fe3d-491c-a613-5bfc070334dc", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -3267,7 +3347,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b457cba8-ef31-473b-a481-c095b2f4eb48", + "id" : "bb23c1a5-6bca-4fee-b155-db6e219bb14b", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -3282,7 +3362,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "97519504-fd69-4c08-bd27-15d26fbc9b76", + "id" : "34dca5ce-cc7a-479d-bfa0-3eac6185e0ea", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -3305,7 +3385,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "fc6a4468-1a78-410d-ac97-cf9f05814850", + "id" : 
"67c2a159-5ce7-46e0-ab24-d4a3d3504be1", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -3327,7 +3407,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "97a25d8a-25a0-4bf4-be6d-a6f019cf3a32", + "id" : "85b95d44-d930-4a54-ae1a-ecdb763f0382", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -3349,7 +3429,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "671e8ec7-af31-4c54-b6bb-96ebe69881de", + "id" : "ba58a228-ebea-4dd0-a94c-538ba4cae9b7", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -3365,7 +3445,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "24d6aaaa-5202-4401-99c3-bb15925bd5be", + "id" : "91e9d8a6-0270-4b24-b9bf-3e6df67b07d4", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -3401,7 +3481,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f948bd43-ff05-4245-be30-a0a0dad2b7f0", + "id" : "70dac74c-13bc-4ff5-b26a-661b335c74b0", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -3437,7 +3517,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7e4aaea7-05ca-4aa0-b934-4c81614620a8", + "id" : "d226a0ad-398c-426a-bf29-3d8019ec685e", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -3453,13 +3533,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "14ca1058-25e7-41f6-85ce-ad0bfce2c67c", + "id" : "9e659f3e-613d-4b69-9ed5-e511a0ba541f", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "16803de1-f7dc-4293-acde-fd0eae264377", + "id" : "779aa3ef-3e89-4b36-b902-a9f95830c799", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" @@ -3554,4 +3634,4 @@ "clientPolicies" : { "policies" : [ ] } -} +} \ No newline at end of file diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 66da936e9..49770838f 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -29,6 +29,10 @@ legal.sme@status.im legal1.sme@status.im manuchehr@status.im peopleops.partner@status.im +peopleops.talent.program-lead@status.im +peopleops.talent.project-lead@status.im +peopleops.talent.sme@status.im +peopleops.talent1.sme@status.im peopleops.talent@status.im ppg.ba.program-lead@status.im ppg.ba.project-lead@status.im From 8ce5855b78bb9d240ae967418aa9d68e164683b3 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 6 Feb 2023 10:30:38 -0500 Subject: [PATCH 47/59] move towards returning dict in get_current_user --- .../scripts/get_current_user.py | 2 +- .../scripts/test_get_current_user.py | 44 +++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_current_user.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py index e4b524c2a..53ebd1c5b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py @@ -26,4 +26,4 @@ class 
GetCurrentUser(Script):
         **kwargs: Any
     ) -> Any:
         """Run."""
-        return g.user.username
+        return g.user.__dict__
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_current_user.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_current_user.py
new file mode 100644
index 000000000..146baaf65
--- /dev/null
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_current_user.py
@@ -0,0 +1,44 @@
+"""Test_get_current_user."""
+from flask.app import Flask
+from flask import g
+from flask.testing import FlaskClient
+from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext
+from spiffworkflow_backend.scripts.get_current_user import GetCurrentUser
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.process_instance_processor import (
+    ProcessInstanceProcessor,
+)
+from spiffworkflow_backend.services.user_service import UserService
+
+
+class TestGetCurrentUser(BaseTest):
+
+    def test_get_current_user(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test_get_current_user."""
+        testuser1 = self.find_or_create_user("testuser1")
+        testuser1.tenant_specific_field_1 = "456"
+        db.session.add(testuser1)
+        g.user = testuser1
+        process_model_identifier = "test_process_model"
+        process_instance_id = 1
+        script_attributes_context = ScriptAttributesContext(
+            task=None,
+            environment_identifier="testing",
+            process_instance_id=process_instance_id,
+            process_model_identifier=process_model_identifier,
+        )
+        result = GetCurrentUser().run(
+            script_attributes_context,
+        )
+        assert result['username'] == "testuser1"

From 0e3ba4664fc12c542ede524b1195fc0abf12d7ca Mon Sep 17 00:00:00 2001
From: Dan
Date: Mon, 6 Feb 2023 12:06:37 -0500
Subject: [PATCH 48/59] When catching non-Jinja errors from Jinja, raise a good
 error message, and make a best effort at tracking down the line number and
 error line if possible.

---
 .../routes/tasks_controller.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
index 79cd84249..f7a6e57e2 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -2,6 +2,7 @@
 import json
 import os
 import uuid
+from sys import exc_info
 from typing import Any
 from typing import Dict
 from typing import Optional
@@ -562,6 +563,20 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
             "Jinja2 template errors can happen when trying to displaying task data"
         )
         raise wfe from template_error
+    except Exception as error:
+        type, value, tb = exc_info()
+        wfe = WorkflowTaskException(
+            str(error), task=spiff_task, exception=error
+        )
+        while tb:
+            if tb.tb_frame.f_code.co_filename == '