diff --git a/.gitignore b/.gitignore
index 58cb14347..a61561685 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,7 @@
.mypy_cache/
/.idea/
/.coverage
-/.coverage.*
+.coverage.*
/.nox/
/.python-version
/.pytype/
diff --git a/bin/delete_and_import_all_permissions.py b/bin/delete_and_import_all_permissions.py
index a55e36e7f..966ec5a11 100644
--- a/bin/delete_and_import_all_permissions.py
+++ b/bin/delete_and_import_all_permissions.py
@@ -7,7 +7,8 @@ def main() -> None:
"""Main."""
app = get_hacked_up_app_for_script()
with app.app_context():
- AuthorizationService.delete_all_permissions_and_recreate()
+ AuthorizationService.delete_all_permissions()
+ AuthorizationService.import_permissions_from_yaml_file()
if __name__ == "__main__":
diff --git a/bin/get_bpmn_json_for_process_instance b/bin/get_bpmn_json_for_process_instance
old mode 100755
new mode 100644
index 9b6b4c757..dbce01ecc
--- a/bin/get_bpmn_json_for_process_instance
+++ b/bin/get_bpmn_json_for_process_instance
@@ -1,5 +1,4 @@
"""Get the bpmn process json for a given process instance id and store it in /tmp."""
-#!/usr/bin/env python
import os
import sys
@@ -18,15 +17,17 @@ def main(process_instance_id: str):
id=process_instance_id
).first()
+ file_path = f"/tmp/{process_instance_id}_bpmn_json.json"
if not process_instance:
raise Exception(
f"Could not find a process instance with id: {process_instance_id}"
)
with open(
- f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8"
+ file_path, "w", encoding="utf-8"
) as f:
f.write(process_instance.bpmn_json)
+ print(f"Saved to {file_path}")
if len(sys.argv) < 2:
diff --git a/bin/get_logs_from_docker_compose b/bin/get_logs_from_docker_compose
index 78c7684e3..d2c06c6f3 100755
--- a/bin/get_logs_from_docker_compose
+++ b/bin/get_logs_from_docker_compose
@@ -7,4 +7,5 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
-docker compose logs "$@"
+# "docker compose logs" only shows the db logs, so name both services explicitly
+docker compose logs db spiffworkflow-backend
diff --git a/bin/get_perms b/bin/get_perms
new file mode 100755
index 000000000..5e0dbd6de
--- /dev/null
+++ b/bin/get_perms
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+ >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+ exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+set -x
+mysql -uroot spiffworkflow_backend_development -e 'select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa join principal p on p.id = pa.principal_id join `group` g on g.id = p.group_id join permission_target pt on pt.id = pa.permission_target_id;'
diff --git a/bin/get_routes b/bin/get_routes
new file mode 100755
index 000000000..63f194ef0
--- /dev/null
+++ b/bin/get_routes
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+ >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+ exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+grep -E '^ +\/' src/spiffworkflow_backend/api.yml | sort
diff --git a/bin/git_commit_bpmn_models_repo b/bin/git_commit_bpmn_models_repo
index 62fc0cab0..0ba512021 100755
--- a/bin/git_commit_bpmn_models_repo
+++ b/bin/git_commit_bpmn_models_repo
@@ -14,21 +14,39 @@ git_commit_message="$2"
git_branch="$3"
git_commit_username="$4"
git_commit_email="$5"
+git_commit_password="$6"
-if [[ -z "${5:-}" ]]; then
+if [[ -z "${6:-}" ]]; then
-  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]"
+  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email] [git_commit_password]"
exit 1
fi
-cd "$bpmn_models_absolute_dir"
-git add .
+function failed_to_get_lock() {
+ >&2 echo "ERROR: Failed to get lock."
+ exit 1
+}
-# https://unix.stackexchange.com/a/155077/456630
-if [ -z "$(git status --porcelain)" ]; then
- echo "No changes to commit"
-else
- git config --local user.name "$git_commit_username"
- git config --local user.email "$git_commit_email"
- git commit -m "$git_commit_message"
- git push --set-upstream origin "$git_branch"
-fi
+function run() {
+ cd "$bpmn_models_absolute_dir"
+ git add .
+
+ # https://unix.stackexchange.com/a/155077/456630
+ if [ -z "$(git status --porcelain)" ]; then
+ echo "No changes to commit"
+ else
+ PAT="${git_commit_username}:${git_commit_password}"
+ AUTH=$(echo -n "$PAT" | openssl base64 | tr -d '\n')
+
+ git config --local user.name "$git_commit_username"
+ git config --local user.email "$git_commit_email"
+ git config --local http.extraHeader "Authorization: Basic $AUTH"
+ git commit -m "$git_commit_message"
+ git push --set-upstream origin "$git_branch"
+ git config --unset --local http.extraHeader
+ fi
+}
+
+exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock
+flock --timeout 60 "$lock_fd" || failed_to_get_lock
+run
+flock -u "$lock_fd"
diff --git a/bin/import_tickets_for_command_line.py b/bin/import_tickets_for_command_line.py
index e193b5990..cc94ba545 100644
--- a/bin/import_tickets_for_command_line.py
+++ b/bin/import_tickets_for_command_line.py
@@ -27,7 +27,6 @@ def main():
"""Main."""
app = get_hacked_up_app_for_script()
with app.app_context():
-
process_model_identifier_ticket = "ticket"
db.session.query(ProcessInstanceModel).filter(
ProcessInstanceModel.process_model_identifier
diff --git a/bin/keycloak_test_server.py b/bin/keycloak_test_server.py
index 59efd36c5..3e9334938 100644
--- a/bin/keycloak_test_server.py
+++ b/bin/keycloak_test_server.py
@@ -40,7 +40,8 @@ def hello_world():
return (
'Hello, %s, See private '
'Log out'
- ) % oidc.user_getfield("preferred_username")
+ % oidc.user_getfield("preferred_username")
+ )
else:
return 'Welcome anonymous, Log in'
diff --git a/bin/recreate_db b/bin/recreate_db
index 5eb248fe0..ec38c7b39 100755
--- a/bin/recreate_db
+++ b/bin/recreate_db
@@ -61,3 +61,7 @@ for task in $tasks; do
done
SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
+if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
+ mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
+ FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
+fi
diff --git a/bin/spiffworkflow-realm.json b/bin/spiffworkflow-realm.json
index a30f53c14..e31942cf1 100644
--- a/bin/spiffworkflow-realm.json
+++ b/bin/spiffworkflow-realm.json
@@ -426,6 +426,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
+ "email" : "admin@spiffworkflow.org",
"credentials" : [ {
"id" : "ef435043-ef0c-407a-af5b-ced13182a408",
"type" : "password",
@@ -446,6 +447,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "alex@sartography.com",
"credentials" : [ {
"id" : "81a61a3b-228d-42b3-b39a-f62d8e7f57ca",
"type" : "password",
@@ -465,6 +467,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "amir@status.im",
"credentials" : [ {
"id" : "e589f3ad-bf7b-4756-89f7-7894c03c2831",
"type" : "password",
@@ -484,6 +487,9 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "firstName" : "",
+ "lastName" : "",
+ "email" : "ciadmin1@spiffworkflow.org",
"credentials" : [ {
"id" : "111b5ea1-c2ab-470a-a16b-2373bc94de7a",
"type" : "password",
@@ -499,28 +505,6 @@
},
"notBefore" : 0,
"groups" : [ ]
- }, {
- "id" : "56457e8f-47c6-4f9f-a72b-473dea5edfeb",
- "createdTimestamp" : 1657139955336,
- "username" : "ciuser1",
- "enabled" : true,
- "totp" : false,
- "emailVerified" : false,
- "credentials" : [ {
- "id" : "762f36e9-47af-44da-8520-cf09d752497a",
- "type" : "password",
- "createdDate" : 1657139966468,
- "secretData" : "{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}",
- "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
- } ],
- "disableableCredentialTypes" : [ ],
- "requiredActions" : [ ],
- "realmRoles" : [ "default-roles-spiffworkflow" ],
- "clientRoles" : {
- "spiffworkflow-backend" : [ "uma_protection" ]
- },
- "notBefore" : 0,
- "groups" : [ ]
}, {
"id" : "d58b61cc-a77e-488f-a427-05f4e0572e20",
"createdTimestamp" : 1669132945413,
@@ -530,6 +514,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
+ "email" : "core@status.im",
"credentials" : [ {
"id" : "ee80092b-8ee6-4699-8492-566e088b48f5",
"type" : "password",
@@ -550,6 +535,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "dan@sartography.com",
"credentials" : [ {
"id" : "d517c520-f500-4542-80e5-7144daef1e32",
"type" : "password",
@@ -569,6 +555,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "daniel@sartography.com",
"credentials" : [ {
"id" : "f240495c-265b-42fc-99db-46928580d07d",
"type" : "password",
@@ -588,6 +575,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "elizabeth@sartography.com",
"credentials" : [ {
"id" : "ae951ec8-9fc9-4f1b-b340-bbbe463ae5c2",
"type" : "password",
@@ -609,6 +597,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
+ "email" : "fin@status.im",
"credentials" : [ {
"id" : "2379940c-98b4-481a-b629-0bd1a4e91acf",
"type" : "password",
@@ -631,6 +620,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
+ "email" : "fin1@status.im",
"credentials" : [ {
"id" : "96216746-ff72-454e-8288-232428d10b42",
"type" : "password",
@@ -651,6 +641,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "finance_user1@status.im",
"credentials" : [ {
"id" : "f14722ec-13a7-4d35-a4ec-0475d405ae58",
"type" : "password",
@@ -670,6 +661,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "harmeet@status.im",
"credentials" : [ {
"id" : "89c26090-9bd3-46ac-b038-883d02e3f125",
"type" : "password",
@@ -691,6 +683,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
+ "email" : "j@status.im",
"credentials" : [ {
"id" : "e71ec785-9133-4b7d-8015-1978379af0bb",
"type" : "password",
@@ -711,6 +704,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "jakub@status.im",
"credentials" : [ {
"id" : "ce141fa5-b8d5-4bbe-93e7-22e7119f97c2",
"type" : "password",
@@ -730,6 +724,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "jarrad@status.im",
"credentials" : [ {
"id" : "113e0343-1069-476d-83f9-21d98edb9cfa",
"type" : "password",
@@ -749,6 +744,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "jason@sartography.com",
"credentials" : [ {
"id" : "40abf32e-f0cc-4a17-8231-1a69a02c1b0b",
"type" : "password",
@@ -768,6 +764,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "jon@sartography.com",
"credentials" : [ {
"id" : "8b520e01-5b9b-44ab-9ee8-505bd0831a45",
"type" : "password",
@@ -787,6 +784,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "kb@sartography.com",
"credentials" : [ {
"id" : "2c0be363-038f-48f1-86d6-91fdd28657cf",
"type" : "password",
@@ -808,6 +806,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
+ "email" : "lead@status.im",
"credentials" : [ {
"id" : "96e836a4-1a84-45c5-a9ed-651b0c90195e",
"type" : "password",
@@ -830,6 +829,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
+ "email" : "lead1@status.im",
"credentials" : [ {
"id" : "4e17388b-6c44-44e1-b20a-a873c0feb9a8",
"type" : "password",
@@ -850,6 +850,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "manuchehr@status.im",
"credentials" : [ {
"id" : "07dabf55-b5d3-4f98-abba-3334086ecf5e",
"type" : "password",
@@ -869,6 +870,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "mike@sartography.com",
"credentials" : [ {
"id" : "1ed375fb-0f1a-4c2a-9243-2477242cf7bd",
"type" : "password",
@@ -887,7 +889,10 @@
"username" : "natalia",
"enabled" : true,
"totp" : false,
- "emailVerified" : false,
+ "emailVerified" : true,
+ "firstName" : "",
+ "lastName" : "",
+ "email" : "natalia@sartography.com",
"credentials" : [ {
"id" : "b6aa9936-39cc-4931-bfeb-60e6753de5ba",
"type" : "password",
@@ -907,6 +912,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "sasha@status.im",
"credentials" : [ {
"id" : "4a170af4-6f0c-4e7b-b70c-e674edf619df",
"type" : "password",
@@ -926,6 +932,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "service-account@status.im",
"serviceAccountClientId" : "spiffworkflow-backend",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],
@@ -943,6 +950,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
+ "email" : "service-account-withauth@status.im",
"serviceAccountClientId" : "withAuth",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],
@@ -2166,7 +2174,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
- "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ]
+ "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@@ -2184,7 +2192,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
- "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ]
+ "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@@ -2274,7 +2282,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
- "id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3",
+ "id" : "76ae522e-7ab3-48dc-af76-9cb8069368a2",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",
@@ -2296,7 +2304,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "4da99e29-371e-4f4b-a863-e5079f30a714",
+ "id" : "ddf80243-ec40-4c21-ae94-2967d841f84c",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",
@@ -2325,7 +2333,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "d398c928-e201-4e8b-ab09-289bb351cd2e",
+ "id" : "4f075680-46b7-49eb-b94c-d7425f105cb9",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@@ -2347,7 +2355,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d",
+ "id" : "a0467c77-c3dc-4df6-acd2-c05ca13601ed",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@@ -2369,7 +2377,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "98013bc1-e4dd-41f7-9849-1f898143b944",
+ "id" : "07536fec-8d41-4c73-845f-ca85002022e0",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@@ -2391,7 +2399,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2",
+ "id" : "f123f912-71fb-4596-97f9-c0628a59413d",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",
@@ -2413,7 +2421,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "2470e6f4-9a01-476a-9057-75d78e577182",
+ "id" : "03c26cc5-366b-462d-9297-b4016f8d7c57",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",
@@ -2435,7 +2443,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9",
+ "id" : "1b4f474e-aa64-45cc-90f1-63504585d89c",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",
@@ -2458,7 +2466,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "97c83e43-cba8-4d92-b108-9181bca07a1e",
+ "id" : "38024dd6-daff-45de-8782-06b07b7bfa56",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",
@@ -2480,7 +2488,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "fbabd64c-20de-4b8c-bfd2-be6822572278",
+ "id" : "b7e30fca-e4ac-4886-a2e7-642fe2a27ee7",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@@ -2516,7 +2524,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "0628a99f-b194-495d-8e54-cc4ca8684956",
+ "id" : "92e3571d-ac3e-4e79-a391-5315954e866f",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",
@@ -2552,7 +2560,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a",
+ "id" : "5093dd2d-fe5d-4f41-a54d-03cd648d9b7f",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",
@@ -2581,7 +2589,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa",
+ "id" : "95d2f1ff-6907-47ce-a93c-db462fe04844",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",
@@ -2596,7 +2604,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9",
+ "id" : "27405ee8-5730-419c-944c-a7c67edd91ce",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",
@@ -2619,7 +2627,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9",
+ "id" : "fce6d926-3a99-40ee-b79e-cae84493dbd8",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@@ -2641,7 +2649,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a",
+ "id" : "75d93596-b7fb-4a2c-a780-e6a038e66fe9",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",
@@ -2663,7 +2671,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03",
+ "id" : "04cdc1ac-c58d-4f8c-bc10-7d5e2bb99485",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",
@@ -2679,7 +2687,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3",
+ "id" : "99593c1e-f2a5-4198-ad41-634694259110",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",
@@ -2715,7 +2723,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828",
+ "id" : "7d53f026-b05e-4a9c-aba6-23b17826a4d4",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",
@@ -2751,7 +2759,7 @@
"userSetupAllowed" : false
} ]
}, {
- "id" : "824fe757-cc5c-4e13-ab98-9a2132e10f5c",
+ "id" : "7ca17e64-f916-4d6c-91f0-815ec66f50e8",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@@ -2767,13 +2775,13 @@
} ]
} ],
"authenticatorConfig" : [ {
- "id" : "817a93da-29df-447f-ab05-cd9557e66745",
+ "id" : "9b71d817-b999-479d-97f8-07e39dd9e9fa",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
- "id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878",
+ "id" : "f9f13ba1-6a17-436b-a80b-6ccc042f9fc2",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"
diff --git a/conftest.py b/conftest.py
index c3af94332..b24a7ed1b 100644
--- a/conftest.py
+++ b/conftest.py
@@ -9,7 +9,7 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
@@ -47,7 +47,7 @@ def app() -> Flask:
@pytest.fixture()
def with_db_and_bpmn_file_cleanup() -> None:
"""Process_group_resource."""
- db.session.query(ActiveTaskUserModel).delete()
+ db.session.query(HumanTaskUserModel).delete()
for model in SpiffworkflowBaseDBModel._all_subclasses():
db.session.query(model).delete()
diff --git a/migrations/versions/4d75421c0af0_.py b/migrations/versions/907bcf0c3d75_.py
similarity index 90%
rename from migrations/versions/4d75421c0af0_.py
rename to migrations/versions/907bcf0c3d75_.py
index 34fa1e974..552afe485 100644
--- a/migrations/versions/4d75421c0af0_.py
+++ b/migrations/versions/907bcf0c3d75_.py
@@ -1,8 +1,8 @@
"""empty message
-Revision ID: 4d75421c0af0
+Revision ID: 907bcf0c3d75
Revises:
-Create Date: 2022-12-06 17:42:56.417673
+Create Date: 2022-12-28 13:52:13.030028
"""
from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
-revision = '4d75421c0af0'
+revision = '907bcf0c3d75'
down_revision = None
branch_labels = None
depends_on = None
@@ -72,14 +72,15 @@ def upgrade():
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
- sa.Column('uid', sa.String(length=50), nullable=True),
- sa.Column('service', sa.String(length=50), nullable=False),
+ sa.Column('service', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.String(length=255), nullable=False),
- sa.Column('name', sa.String(length=255), nullable=True),
+ sa.Column('display_name', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
+ sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
+ sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('service', 'service_id', name='service_key'),
- sa.UniqueConstraint('uid')
+ sa.UniqueConstraint('username')
)
op.create_table('message_correlation_property',
sa.Column('id', sa.Integer(), nullable=False),
@@ -174,11 +175,20 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
)
- op.create_table('active_task',
+ op.create_table('user_group_assignment_waiting',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('username', sa.String(length=255), nullable=False),
+ sa.Column('group_id', sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique')
+ )
+ op.create_table('human_task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
- sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
+ sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
+ sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('form_file_name', sa.String(length=50), nullable=True),
sa.Column('ui_form_file_name', sa.String(length=50), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
@@ -189,12 +199,15 @@ def upgrade():
sa.Column('task_type', sa.String(length=50), nullable=True),
sa.Column('task_status', sa.String(length=50), nullable=True),
sa.Column('process_model_display_name', sa.String(length=255), nullable=True),
+ sa.Column('completed', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ),
+ sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
- sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique')
+ sa.UniqueConstraint('task_id', 'process_instance_id', name='human_task_unique')
)
+ op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False)
op.create_table('message_correlation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
@@ -255,23 +268,20 @@ def upgrade():
sa.Column('spiff_step', sa.Integer(), nullable=False),
sa.Column('task_json', sa.JSON(), nullable=False),
sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
- sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
- sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
- sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id')
)
- op.create_table('active_task_user',
+ op.create_table('human_task_user',
sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('active_task_id', sa.Integer(), nullable=False),
+ sa.Column('human_task_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['active_task_id'], ['active_task.id'], ),
+ sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
- sa.UniqueConstraint('active_task_id', 'user_id', name='active_task_user_unique')
+ sa.UniqueConstraint('human_task_id', 'user_id', name='human_task_user_unique')
)
- op.create_index(op.f('ix_active_task_user_active_task_id'), 'active_task_user', ['active_task_id'], unique=False)
- op.create_index(op.f('ix_active_task_user_user_id'), 'active_task_user', ['user_id'], unique=False)
+ op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False)
+ op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False)
op.create_table('message_correlation_message_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('message_instance_id', sa.Integer(), nullable=False),
@@ -291,9 +301,9 @@ def downgrade():
op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance')
op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance')
op.drop_table('message_correlation_message_instance')
- op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user')
- op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user')
- op.drop_table('active_task_user')
+ op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user')
+ op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user')
+ op.drop_table('human_task_user')
op.drop_table('spiff_step_details')
op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata')
op.drop_table('process_instance_metadata')
@@ -304,7 +314,9 @@ def downgrade():
op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation')
op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation')
op.drop_table('message_correlation')
- op.drop_table('active_task')
+ op.drop_index(op.f('ix_human_task_completed'), table_name='human_task')
+ op.drop_table('human_task')
+ op.drop_table('user_group_assignment_waiting')
op.drop_table('user_group_assignment')
op.drop_table('secret')
op.drop_table('refresh_token')
diff --git a/poetry.lock b/poetry.lock
index a23004b40..707c5b3c3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -654,7 +654,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
-resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
+resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"
[[package]]
name = "Flask-Cors"
@@ -1851,7 +1851,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
-resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134"
+resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"
[[package]]
name = "SQLAlchemy"
diff --git a/src/.coverage.jason-Gazelle.473795.719220 b/src/.coverage.jason-Gazelle.473795.719220
new file mode 100644
index 000000000..3c5fc7087
Binary files /dev/null and b/src/.coverage.jason-Gazelle.473795.719220 differ
diff --git a/src/.coverage.jason-Gazelle.475245.497833 b/src/.coverage.jason-Gazelle.475245.497833
new file mode 100644
index 000000000..214df28dc
Binary files /dev/null and b/src/.coverage.jason-Gazelle.475245.497833 differ
diff --git a/src/.coverage.jason-Gazelle.476451.578823 b/src/.coverage.jason-Gazelle.476451.578823
new file mode 100644
index 000000000..ef7f5c499
Binary files /dev/null and b/src/.coverage.jason-Gazelle.476451.578823 differ
diff --git a/src/spiffworkflow_backend/__init__.py b/src/spiffworkflow_backend/__init__.py
index 9599116a2..f1de793d4 100644
--- a/src/spiffworkflow_backend/__init__.py
+++ b/src/spiffworkflow_backend/__init__.py
@@ -18,11 +18,11 @@ from werkzeug.exceptions import NotFound
import spiffworkflow_backend.load_database_models # noqa: F401
from spiffworkflow_backend.config import setup_config
+from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
openid_blueprint,
)
-from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -93,7 +93,8 @@ def create_app() -> flask.app.Flask:
if os.environ.get("FLASK_SESSION_SECRET_KEY") is None:
raise KeyError(
- "Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY"
+ "Cannot find the secret_key from the environment. Please set"
+ " FLASK_SESSION_SECRET_KEY"
)
app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY")
@@ -103,7 +104,6 @@ def create_app() -> flask.app.Flask:
migrate.init_app(app, db)
app.register_blueprint(user_blueprint)
- app.register_blueprint(process_api_blueprint)
app.register_blueprint(api_error_blueprint)
app.register_blueprint(admin_blueprint, url_prefix="/admin")
app.register_blueprint(openid_blueprint, url_prefix="/openid")
@@ -117,7 +117,7 @@ def create_app() -> flask.app.Flask:
]
CORS(app, origins=origins_re, max_age=3600)
- connexion_app.add_api("api.yml", base_path="/v1.0")
+ connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX)
mail = Mail(app)
app.config["MAIL_APP"] = mail
diff --git a/src/spiffworkflow_backend/api.yml b/src/spiffworkflow_backend/api.yml
index a97a3b11f..6c720265c 100755
--- a/src/spiffworkflow_backend/api.yml
+++ b/src/spiffworkflow_backend/api.yml
@@ -8,10 +8,6 @@ servers:
- url: http://localhost:5000/v1.0
# this is handled in flask now
security: []
-# - jwt: ["secret"]
-# - oAuth2AuthCode:
-# - read_email
-# - uid
paths:
/login:
@@ -22,7 +18,6 @@ paths:
schema:
type: string
get:
- security: []
summary: redirect to open id authentication server
operationId: spiffworkflow_backend.routes.user.login
tags:
@@ -48,7 +43,6 @@ paths:
schema:
type: string
get:
- security: []
operationId: spiffworkflow_backend.routes.user.login_return
tags:
- Authentication
@@ -68,7 +62,6 @@ paths:
schema:
type: string
get:
- security: []
operationId: spiffworkflow_backend.routes.user.logout
summary: Logout authenticated user
tags:
@@ -78,7 +71,6 @@ paths:
description: Logout Authenticated User
/logout_return:
get:
- security: []
operationId: spiffworkflow_backend.routes.user.logout_return
summary: Logout authenticated user
tags:
@@ -89,7 +81,6 @@ paths:
/login_api:
get:
- security: []
operationId: spiffworkflow_backend.routes.user.login_api
summary: Authenticate user for API access
tags:
@@ -115,7 +106,6 @@ paths:
schema:
type: string
get:
- security: []
operationId: spiffworkflow_backend.routes.user.login_api_return
tags:
- Authentication
@@ -125,8 +115,7 @@ paths:
/status:
get:
- security: []
- operationId: spiffworkflow_backend.routes.process_api_blueprint.status
+ operationId: spiffworkflow_backend.routes.health_controller.status
summary: Returns 200 if the server is Responding
tags:
- Liveness
@@ -160,7 +149,7 @@ paths:
schema:
type: integer
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_list
+ operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_list
summary: get list
tags:
- Process Groups
@@ -174,7 +163,7 @@ paths:
items:
$ref: "#/components/schemas/ProcessModelCategory"
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_add
+ operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_create
summary: Add process group
tags:
- Process Groups
@@ -201,7 +190,7 @@ paths:
type: string
# process_group_show
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_show
+ operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_show
summary: Returns a single process group
tags:
- Process Groups
@@ -213,7 +202,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_delete
+ operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_delete
summary: Deletes a single process group
tags:
- Process Groups
@@ -221,7 +210,7 @@ paths:
"200":
description: The process group was deleted.
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_update
+ operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_update
summary: Updates a single process group
tags:
- Process Groups
@@ -253,7 +242,7 @@ paths:
schema:
type: string
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_move
+ operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_move
summary: returns the new group
tags:
- Process Groups
@@ -285,6 +274,12 @@ paths:
description: Get only the process models that the user can run
schema:
type: boolean
+ - name: include_parent_groups
+ in: query
+ required: false
+ description: Get the display names for the parent groups as well
+ schema:
+ type: boolean
- name: page
in: query
required: false
@@ -298,7 +293,7 @@ paths:
schema:
type: integer
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_list
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_list
summary: Return a list of process models for a given process group
tags:
- Process Models
@@ -321,7 +316,33 @@ paths:
schema:
type: string
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_create
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_create
+ summary: Creates a new process model with the given parameters.
+ tags:
+ - Process Models
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/ProcessModel"
+ responses:
+ "201":
+ description: Process model created successfully.
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/ProcessModel"
+
+ /process-models-natural-language/{modified_process_group_id}:
+ parameters:
+ - name: modified_process_group_id
+ in: path
+ required: true
+ description: modified id of an existing process group
+ schema:
+ type: string
+ post:
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_create_with_natural_language
+      summary: Creates a new process model from a natural language description.
tags:
- Process Models
@@ -347,7 +368,7 @@ paths:
schema:
type: string
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_create
summary: Add a new workflow spec file
tags:
- Process Model Files
@@ -377,7 +398,7 @@ paths:
schema:
type: string
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_show
summary: Returns a single process model
tags:
- Process Models
@@ -389,7 +410,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_update
summary: Modifies an existing process model with the given parameters.
tags:
- Process Models
@@ -406,7 +427,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_delete
summary: Removes an existing process model
tags:
- Process Models
@@ -433,7 +454,7 @@ paths:
schema:
type: string
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_move
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_move
summary: returns the new model
tags:
- Process Models
@@ -460,7 +481,7 @@ paths:
schema:
type: string
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_publish
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_publish
summary: Merge changes from this model to another branch.
tags:
- Process Models
@@ -509,6 +530,119 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
+ /process-instances/for-me:
+ parameters:
+ - name: process_model_identifier
+ in: query
+ required: false
+ description: The unique id of an existing process model.
+ schema:
+ type: string
+ - name: page
+ in: query
+ required: false
+ description: The page number to return. Defaults to page 1.
+ schema:
+ type: integer
+ - name: per_page
+ in: query
+ required: false
+          description: The number of items to return per page. Defaults to page 1.
+ schema:
+ type: integer
+ - name: start_from
+ in: query
+ required: false
+ description: For filtering - beginning of start window - in seconds since epoch
+ schema:
+ type: integer
+ - name: start_to
+ in: query
+ required: false
+ description: For filtering - end of start window - in seconds since epoch
+ schema:
+ type: integer
+ - name: end_from
+ in: query
+ required: false
+ description: For filtering - beginning of end window - in seconds since epoch
+ schema:
+ type: integer
+ - name: end_to
+ in: query
+ required: false
+ description: For filtering - end of end window - in seconds since epoch
+ schema:
+ type: integer
+ - name: process_status
+ in: query
+ required: false
+ description: For filtering - not_started, user_input_required, waiting, complete, error, or suspended
+ schema:
+ type: string
+ - name: initiated_by_me
+ in: query
+ required: false
+ description: For filtering - show instances initiated by me
+ schema:
+ type: boolean
+ - name: with_tasks_completed_by_me
+ in: query
+ required: false
+ description: For filtering - show instances with tasks completed by me
+ schema:
+ type: boolean
+ - name: with_tasks_completed_by_my_group
+ in: query
+ required: false
+ description: For filtering - show instances with tasks completed by my group
+ schema:
+ type: boolean
+ - name: with_relation_to_me
+ in: query
+ required: false
+ description: For filtering - show instances that have something to do with me
+ schema:
+ type: boolean
+ - name: user_filter
+ in: query
+ required: false
+ description: For filtering - indicates the user has manually entered a query
+ schema:
+ type: boolean
+ - name: report_identifier
+ in: query
+ required: false
+ description: Specifies the identifier of a report to use, if any
+ schema:
+ type: string
+ - name: report_id
+ in: query
+ required: false
+ description: Specifies the identifier of a report to use, if any
+ schema:
+ type: integer
+ - name: user_group_identifier
+ in: query
+ required: false
+ description: The identifier of the group to get the process instances for
+ schema:
+ type: string
+ get:
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list_for_me
+ summary: Returns a list of process instances that are associated with me.
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: Workflow.
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Workflow"
+
/process-instances:
parameters:
- name: process_model_identifier
@@ -577,6 +711,12 @@ paths:
description: For filtering - show instances with tasks completed by my group
schema:
type: boolean
+ - name: with_relation_to_me
+ in: query
+ required: false
+ description: For filtering - show instances that have something to do with me
+ schema:
+ type: boolean
- name: user_filter
in: query
required: false
@@ -595,9 +735,15 @@ paths:
description: Specifies the identifier of a report to use, if any
schema:
type: integer
+ - name: user_group_identifier
+ in: query
+ required: false
+ description: The identifier of the group to get the process instances for
+ schema:
+ type: string
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list
- summary: Returns a list of process instances for a given process model
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list
+ summary: Returns a list of process instances.
tags:
- Process Instances
responses:
@@ -619,7 +765,7 @@ paths:
schema:
type: string
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_create
+ operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_create
summary: Create script unit test based on given criteria
tags:
- Script Unit Test
@@ -640,7 +786,7 @@ paths:
schema:
type: string
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_run
+ operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_run
summary: Run a given script unit test.
tags:
- Script Unit Test
@@ -661,7 +807,7 @@ paths:
schema:
type: string
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_create
summary: Creates an process instance from a process model and returns the instance
tags:
- Process Instances
@@ -673,6 +819,53 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
+ /process-instances/for-me/{modified_process_model_identifier}/{process_instance_id}/task-info:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The unique id of an existing process model
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of an existing process instance.
+ schema:
+ type: integer
+ - name: process_identifier
+ in: query
+ required: false
+ description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
+ schema:
+ type: string
+ - name: all_tasks
+ in: query
+ required: false
+        description: If true, this will return all tasks associated with the process instance and not just user tasks.
+ schema:
+ type: boolean
+ - name: spiff_step
+ in: query
+ required: false
+ description: If set will return the tasks as they were during a specific step of execution.
+ schema:
+ type: integer
+ get:
+ tags:
+ - Process Instances
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data_for_me
+ summary: returns the list of all user tasks associated with process instance without the task data
+ responses:
+ "200":
+ description: list of tasks
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Task"
+
/process-instances/{modified_process_model_identifier}/{process_instance_id}/task-info:
parameters:
- name: modified_process_model_identifier
@@ -708,7 +901,7 @@ paths:
get:
tags:
- Process Instances
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data
summary: returns the list of all user tasks associated with process instance without the task data
responses:
"200":
@@ -720,6 +913,60 @@ paths:
items:
$ref: "#/components/schemas/Task"
+ /process-instances/for-me/{modified_process_model_identifier}/{process_instance_id}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The unique id of an existing process model
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of an existing process instance.
+ schema:
+ type: integer
+ - name: process_identifier
+ in: query
+ required: false
+ description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
+ schema:
+ type: string
+ get:
+ tags:
+ - Process Instances
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show_for_me
+ summary: Show information about a process instance that is associated with me
+ responses:
+ "200":
+ description: One Process Instance
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Workflow"
+
+ /process-instances/find-by-id/{process_instance_id}:
+ parameters:
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of an existing process instance.
+ schema:
+ type: integer
+ get:
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_find_by_id
+ summary: Find a process instance based on its id only
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: One Process Instance
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Workflow"
+
/process-instances/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: modified_process_model_identifier
@@ -743,7 +990,7 @@ paths:
get:
tags:
- Process Instances
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show
summary: Show information about a process instance
responses:
"200":
@@ -753,7 +1000,7 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_delete
summary: Deletes a single process instance
tags:
- Process Instances
@@ -780,7 +1027,7 @@ paths:
schema:
type: boolean
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_run
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_run
summary: Run a process instance
tags:
- Process Instances
@@ -792,7 +1039,7 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
- /process-instances/{modified_process_model_identifier}/{process_instance_id}/terminate:
+ /process-instance-terminate/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: process_instance_id
in: path
@@ -801,7 +1048,7 @@ paths:
schema:
type: integer
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_terminate
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_terminate
summary: Terminate a process instance
tags:
- Process Instances
@@ -813,7 +1060,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
- /process-instances/{modified_process_model_identifier}/{process_instance_id}/suspend:
+ /process-instance-suspend/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: process_instance_id
in: path
@@ -822,7 +1069,7 @@ paths:
schema:
type: integer
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_suspend
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_suspend
summary: Suspend a process instance
tags:
- Process Instances
@@ -834,7 +1081,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
- /process-instances/{modified_process_model_identifier}/{process_instance_id}/resume:
+ /process-instance-resume/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: process_instance_id
in: path
@@ -843,7 +1090,7 @@ paths:
schema:
type: integer
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_resume
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_resume
summary: Resume a process instance
tags:
- Process Instances
@@ -855,6 +1102,39 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
+ /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The modified process model id
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of an existing process instance.
+ schema:
+ type: integer
+ - name: spiff_step
+ in: query
+ required: false
+ description: Reset the process to this state
+ schema:
+ type: integer
+ post:
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset
+ summary: Reset a process instance to an earlier step
+ tags:
+ - Process Instances
+ responses:
+ "200":
+          description: Empty ok true response on successful reset.
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OkTrue"
+
/process-instances/reports:
parameters:
- name: page
@@ -870,7 +1150,7 @@ paths:
schema:
type: integer
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_list
summary: Returns all process instance reports for process model
tags:
- Process Instances
@@ -884,7 +1164,7 @@ paths:
items:
$ref: "#/components/schemas/Workflow"
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_create
summary: Returns all process instance reports for process model
tags:
- Process Instances
@@ -898,7 +1178,7 @@ paths:
/process-instances/reports/columns:
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_column_list
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list
summary: Returns all available columns for a process instance report.
tags:
- Process Instances
@@ -933,7 +1213,7 @@ paths:
schema:
type: integer
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_show
summary: Returns a report of process instances for a given process model
tags:
- Process Instances
@@ -947,7 +1227,7 @@ paths:
items:
$ref: "#/components/schemas/Workflow"
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_update
summary: Updates a process instance report
tags:
- Process Instances
@@ -959,7 +1239,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_delete
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_delete
summary: Delete a process instance report
tags:
- Process Instances
@@ -986,7 +1266,7 @@ paths:
schema:
type: string
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_show
summary: Returns metadata about the file
tags:
- Process Model Files
@@ -998,7 +1278,7 @@ paths:
schema:
$ref: "#/components/schemas/File"
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_update
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_update
summary: save the contents to the given file
tags:
- Process Model Files
@@ -1021,7 +1301,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_delete
+ operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_delete
summary: Removes an existing process model file
tags:
- Process Model Files
@@ -1050,8 +1330,7 @@ paths:
get:
tags:
- Tasks
- # security: []
- operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_my_tasks
+ operationId: spiffworkflow_backend.routes.tasks_controller.task_list_my_tasks
summary: returns the list of ready or waiting tasks for a user
responses:
"200":
@@ -1080,7 +1359,7 @@ paths:
get:
tags:
- Process Instances
- operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_open_processes
+ operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_open_processes
summary: returns the list of tasks for given user's open process instances
responses:
"200":
@@ -1109,7 +1388,7 @@ paths:
get:
tags:
- Process Instances
- operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_me
+ operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_me
summary: returns the list of tasks for given user's open process instances
responses:
"200":
@@ -1123,6 +1402,12 @@ paths:
/tasks/for-my-groups:
parameters:
+ - name: user_group_identifier
+ in: query
+ required: false
+ description: The identifier of the group to get the tasks for
+ schema:
+ type: string
- name: page
in: query
required: false
@@ -1138,7 +1423,7 @@ paths:
get:
tags:
- Process Instances
- operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_groups
+ operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_groups
summary: returns the list of tasks for given user's open process instances
responses:
"200":
@@ -1150,6 +1435,45 @@ paths:
items:
$ref: "#/components/schemas/Task"
+ /users/search:
+ parameters:
+ - name: username_prefix
+ in: query
+ required: true
+ description: The prefix of the user
+ schema:
+ type: string
+ get:
+ tags:
+ - Users
+ operationId: spiffworkflow_backend.routes.users_controller.user_search
+      summary: Returns a list of users that match the search param
+ responses:
+ "200":
+ description: list of users
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/User"
+
+ /user-groups/for-current-user:
+ get:
+ tags:
+ - User Groups
+ operationId: spiffworkflow_backend.routes.users_controller.user_group_list_for_current_user
+ summary: Group identifiers for current logged in user
+ responses:
+ "200":
+ description: list of user groups
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Task"
+
/task-data/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: modified_process_model_identifier
@@ -1179,7 +1503,7 @@ paths:
get:
tags:
- Process Instances
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_with_task_data
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_with_task_data
summary: returns the list of all user tasks associated with process instance with the task data
responses:
"200":
@@ -1212,7 +1536,7 @@ paths:
schema:
type: string
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update
summary: Update the task data for requested instance and task
tags:
- Process Instances
@@ -1224,11 +1548,104 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
+ /process-data/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The modified id of an existing process model
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of an existing process instance.
+ schema:
+ type: integer
+ - name: process_data_identifier
+ in: path
+ required: true
+ description: The identifier of the process data.
+ schema:
+ type: string
+ get:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_show
+ summary: Fetch the process data value.
+ tags:
+ - Data Objects
+ responses:
+ "200":
+ description: Fetch succeeded.
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Workflow"
+
+ /send-event/{modified_process_model_identifier}/{process_instance_id}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The modified id of an existing process model
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of the process instance
+ schema:
+ type: string
+ post:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.send_bpmn_event
+ summary: Send a BPMN event to the process
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: Event Sent Successfully
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Workflow"
+
+ /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The modified id of an existing process model
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of the process instance
+ schema:
+ type: string
+ - name: task_id
+ in: path
+ required: true
+ description: The unique id of the task.
+ schema:
+ type: string
+ post:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task
+ summary: Mark a task complete without executing it
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: Event Sent Successfully
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Workflow"
+
/service-tasks:
get:
tags:
- Service Tasks
- operationId: spiffworkflow_backend.routes.process_api_blueprint.service_task_list
+ operationId: spiffworkflow_backend.routes.service_tasks_controller.service_task_list
summary: Gets all available service task connectors
responses:
"200":
@@ -1242,7 +1659,7 @@ paths:
get:
tags:
- Authentications
- operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_list
+ operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_list
summary: Gets all available authentications from connector proxy
responses:
"200":
@@ -1279,11 +1696,9 @@ paths:
schema:
type: string
get:
- # disable security so we can get the token from query params instead
- security: []
tags:
- Authentications
- operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_callback
+ operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_callback
summary: Callback to backend
responses:
"200":
@@ -1316,7 +1731,7 @@ paths:
get:
tags:
- Tasks
- operationId: spiffworkflow_backend.routes.process_api_blueprint.task_show
+ operationId: spiffworkflow_backend.routes.tasks_controller.task_show
summary: Gets one task that a user wants to complete
responses:
"200":
@@ -1328,7 +1743,7 @@ paths:
put:
tags:
- Tasks
- operationId: spiffworkflow_backend.routes.process_api_blueprint.task_submit
+ operationId: spiffworkflow_backend.routes.tasks_controller.task_submit
summary: Update the form data for a tasks
requestBody:
content:
@@ -1372,7 +1787,7 @@ paths:
get:
tags:
- Messages
- operationId: spiffworkflow_backend.routes.process_api_blueprint.message_instance_list
+ operationId: spiffworkflow_backend.routes.messages_controller.message_instance_list
summary: Get a list of message instances
responses:
"200":
@@ -1393,7 +1808,7 @@ paths:
post:
tags:
- Messages
- operationId: spiffworkflow_backend.routes.process_api_blueprint.message_start
+ operationId: spiffworkflow_backend.routes.messages_controller.message_start
summary: Instantiate and run a given process model with a message start event matching given identifier
requestBody:
content:
@@ -1437,7 +1852,7 @@ paths:
get:
tags:
- Process Instances
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_log_list
+ operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_log_list
summary: returns a list of logs associated with the process instance
responses:
"200":
@@ -1462,7 +1877,7 @@ paths:
schema:
type: integer
post:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.add_secret
+ operationId: spiffworkflow_backend.routes.secrets_controller.secret_create
summary: Create a secret for a key and value
tags:
- Secrets
@@ -1479,7 +1894,7 @@ paths:
schema:
type: number
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_list
+ operationId: spiffworkflow_backend.routes.secrets_controller.secret_list
summary: Return list of all secrets
tags:
- Secrets
@@ -1500,7 +1915,7 @@ paths:
schema:
type: string
get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.get_secret
+ operationId: spiffworkflow_backend.routes.secrets_controller.secret_show
summary: Return a secret value for a key
tags:
- Secrets
@@ -1512,7 +1927,7 @@ paths:
schema:
$ref: "#/components/schemas/Secret"
delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_secret
+ operationId: spiffworkflow_backend.routes.secrets_controller.secret_delete
summary: Delete an existing secret
tags:
- Secrets
@@ -1524,7 +1939,7 @@ paths:
"404":
description: Secret does not exist
put:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.update_secret
+ operationId: spiffworkflow_backend.routes.secrets_controller.secret_update
summary: Modify an existing secret
tags:
- Secrets
@@ -1583,16 +1998,6 @@ components:
scopes:
read_email: read email
x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope
- # oAuth2AuthCode:
- # type: oauth2
- # description: authenticate with openid server
- # flows:
- # implicit:
- # authorizationUrl: /v1.0/login_api
- # scopes:
- # uid: uid
- # x-tokenInfoUrl: localhost:7000/v1.0/login_api_return
- # x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope
schemas:
OkTrue:
diff --git a/src/spiffworkflow_backend/config/__init__.py b/src/spiffworkflow_backend/config/__init__.py
index 106b07357..fb5901f03 100644
--- a/src/spiffworkflow_backend/config/__init__.py
+++ b/src/spiffworkflow_backend/config/__init__.py
@@ -17,21 +17,21 @@ def setup_database_uri(app: Flask) -> None:
if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
- app.config[
- "SQLALCHEMY_DATABASE_URI"
- ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+ app.config["SQLALCHEMY_DATABASE_URI"] = (
+ f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+ )
elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
- app.config[
- "SQLALCHEMY_DATABASE_URI"
- ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+ app.config["SQLALCHEMY_DATABASE_URI"] = (
+ f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+ )
else:
# use pswd to trick flake8 with hardcoded passwords
db_pswd = os.environ.get("DB_PASSWORD")
if db_pswd is None:
db_pswd = ""
- app.config[
- "SQLALCHEMY_DATABASE_URI"
- ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+ app.config["SQLALCHEMY_DATABASE_URI"] = (
+ f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+ )
else:
app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_URI"
@@ -42,6 +42,7 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
"""Load_config_file."""
try:
app.config.from_object(env_config_module)
+ print(f"loaded config: {env_config_module}")
except ImportStringError as exception:
if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
raise ModuleNotFoundError(
@@ -62,6 +63,7 @@ def setup_config(app: Flask) -> None:
)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config.from_object("spiffworkflow_backend.config.default")
+ print("loaded config: default")
env_config_prefix = "spiffworkflow_backend.config."
if (
@@ -69,6 +71,7 @@ def setup_config(app: Flask) -> None:
and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
):
load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
+ print("loaded config: terraform_deployed_environment")
env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
load_config_file(app, env_config_module)
@@ -87,6 +90,14 @@ def setup_config(app: Flask) -> None:
"permissions",
app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
)
+ print(
+ "set permissions file name config:"
+ f" {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
+ )
+ print(
+ "set permissions file name full path:"
+ f" {app.config['PERMISSIONS_FILE_FULLPATH']}"
+ )
# unversioned (see .gitignore) config that can override everything and include secrets.
# src/spiffworkflow_backend/config/secrets.py
diff --git a/src/spiffworkflow_backend/config/dev.py b/src/spiffworkflow_backend/config/dev.py
index ce6b516c0..cbbc269a8 100644
--- a/src/spiffworkflow_backend/config/dev.py
+++ b/src/spiffworkflow_backend/config/dev.py
@@ -6,3 +6,4 @@ GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-commit
GIT_USER_EMAIL = environ.get(
"GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml"
diff --git a/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml b/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml
index a10b5685b..29d3c9c04 100644
--- a/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml
+++ b/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml
@@ -1,13 +1,10 @@
groups:
admin:
- users: [ciadmin1]
-
- common-user:
- users: [ciuser1]
+ users: [ciadmin1@spiffworkflow.org]
permissions:
admin:
- groups: [admin, common-user]
+ groups: [admin]
users: []
- allowed_permissions: [create, read, update, delete, list, instantiate]
+ allowed_permissions: [create, read, update, delete]
uri: /*
diff --git a/src/spiffworkflow_backend/config/permissions/dev.yml b/src/spiffworkflow_backend/config/permissions/dev.yml
new file mode 100644
index 000000000..a556c0139
--- /dev/null
+++ b/src/spiffworkflow_backend/config/permissions/dev.yml
@@ -0,0 +1,151 @@
+default_group: everybody
+
+groups:
+ admin:
+ users:
+ [
+ admin@spiffworkflow.org,
+ jakub@status.im,
+ jarrad@status.im,
+ kb@sartography.com,
+ alex@sartography.com,
+ dan@sartography.com,
+ mike@sartography.com,
+ jason@sartography.com,
+ j@sartography.com,
+ elizabeth@sartography.com,
+ jon@sartography.com,
+ ]
+
+ Finance Team:
+ users:
+ [
+ jakub@status.im,
+ amir@status.im,
+ jarrad@status.im,
+ sasha@status.im,
+ fin@status.im,
+ fin1@status.im,
+ alex@sartography.com,
+ dan@sartography.com,
+ mike@sartography.com,
+ jason@sartography.com,
+ j@sartography.com,
+ elizabeth@sartography.com,
+ jon@sartography.com,
+ ]
+
+ demo:
+ users:
+ [
+ harmeet@status.im,
+ sasha@status.im,
+ manuchehr@status.im,
+ core@status.im,
+ fin@status.im,
+ fin1@status.im,
+ lead@status.im,
+ lead1@status.im,
+ ]
+
+ test:
+ users:
+ [
+ natalia@sartography.com,
+ ]
+
+permissions:
+ admin:
+ groups: [admin]
+ users: []
+ allowed_permissions: [create, read, update, delete]
+ uri: /*
+
+ # open system defaults for everybody
+ read-all-process-groups:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-groups/*
+ read-all-process-models:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-models/*
+
+ # basic perms for everybody
+ read-all-process-instances-for-me:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/*
+ read-process-instance-reports:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [create, read, update, delete]
+ uri: /process-instances/reports/*
+ processes-read:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /processes
+ service-tasks:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /service-tasks
+ tasks-crud:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [create, read, update, delete]
+ uri: /tasks/*
+ user-groups-for-current-user:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /user-groups/for-current-user
+
+
+ finance-admin:
+ groups: ["Finance Team"]
+ users: []
+ allowed_permissions: [create, read, update, delete]
+ uri: /process-groups/manage-procurement:procurement:*
+
+ manage-revenue-streams-instances:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [create]
+ uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+ manage-procurement-invoice-instances:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [create]
+ uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
+ manage-procurement-instances:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [create]
+ uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
+
+ manage-revenue-streams-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+ manage-procurement-invoice-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
+ manage-procurement-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
+
+ create-test-instances:
+ groups: ["test"]
+ users: []
+ allowed_permissions: [create, read]
+ uri: /process-instances/misc:test:*
diff --git a/src/spiffworkflow_backend/config/permissions/development.yml b/src/spiffworkflow_backend/config/permissions/development.yml
index 14061da78..ee40f839b 100644
--- a/src/spiffworkflow_backend/config/permissions/development.yml
+++ b/src/spiffworkflow_backend/config/permissions/development.yml
@@ -10,61 +10,60 @@ groups:
admin:
users:
[
- admin,
- jakub,
- kb,
- alex,
- dan,
- mike,
- jason,
- jarrad,
- elizabeth,
- jon,
- natalia,
+ admin@spiffworkflow.org,
+ jakub@status.im,
+ jarrad@status.im,
+ kb@sartography.com,
+ alex@sartography.com,
+ dan@sartography.com,
+ mike@sartography.com,
+ jason@sartography.com,
+ j@sartography.com,
+ elizabeth@sartography.com,
+ jon@sartography.com,
]
Finance Team:
users:
[
- jakub,
- alex,
- dan,
- mike,
- jason,
- amir,
- jarrad,
- elizabeth,
- jon,
- natalia,
- sasha,
- fin,
- fin1,
+ jakub@status.im,
+ amir@status.im,
+ jarrad@status.im,
+ sasha@status.im,
+ fin@status.im,
+ fin1@status.im,
+ alex@sartography.com,
+ dan@sartography.com,
+ mike@sartography.com,
+ jason@sartography.com,
+ j@sartography.com,
+ elizabeth@sartography.com,
+ jon@sartography.com,
]
demo:
users:
[
- core,
- fin,
- fin1,
- harmeet,
- sasha,
- manuchehr,
- lead,
- lead1
+ harmeet@status.im,
+ sasha@status.im,
+ manuchehr@status.im,
+ core@status.im,
+ fin@status.im,
+ fin1@status.im,
+ lead@status.im,
+ lead1@status.im,
]
- core-contributor:
+ test:
users:
[
- core,
- harmeet,
+ natalia@sartography.com,
]
admin-ro:
users:
[
- j,
+ j@sartography.com,
]
permissions:
@@ -73,7 +72,6 @@ permissions:
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
-
admin-readonly:
groups: [admin-ro]
users: []
@@ -83,136 +81,93 @@ permissions:
groups: [admin-ro]
users: []
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/*
+ uri: /process-instances/*
- tasks-crud:
- groups: [everybody]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/tasks/*
- service-tasks:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/service-tasks
-
-
- # read all for everybody
+ # open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/process-groups/*
+ uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/process-models/*
- read-all-process-instance:
+ uri: /process-models/*
+
+ # basic perms for everybody
+ read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/process-instances/*
+ uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
- allowed_permissions: [read]
- uri: /v1.0/process-instances/reports/*
+ allowed_permissions: [create, read, update, delete]
+ uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/processes
- #
- # task-data-read:
- # groups: [demo]
- # users: []
- # allowed_permissions: [read]
- # uri: /v1.0/task-data/*
+ uri: /processes
+ service-tasks:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /service-tasks
+ tasks-crud:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [create, read, update, delete]
+ uri: /tasks/*
+ user-groups-for-current-user:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /user-groups/for-current-user
- manage-procurement-admin:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/manage-procurement:*
- manage-procurement-admin-slash:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/manage-procurement/*
- manage-procurement-admin-models:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-models/manage-procurement:*
- manage-procurement-admin-models-slash:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-models/manage-procurement/*
- manage-procurement-admin-instances:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/manage-procurement:*
- manage-procurement-admin-instances-slash:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/manage-procurement/*
-
finance-admin:
groups: ["Finance Team"]
users: []
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/manage-procurement:procurement:*
+ uri: /process-groups/manage-procurement:procurement:*
- manage-revenue-streams-instantiate:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [create]
- uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-revenue-streams-instances:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
-
- manage-procurement-invoice-instantiate:
- groups: ["core-contributor", "demo"]
+ groups: ["demo"]
users: []
allowed_permissions: [create]
- uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:*
+ uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
-
- manage-procurement-instantiate:
- groups: ["core-contributor", "demo"]
+ groups: ["demo"]
users: []
allowed_permissions: [create]
- uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:*
+ uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
-
- core1-admin-models-instantiate:
- groups: ["core-contributor", "Finance Team"]
+ groups: ["demo"]
users: []
allowed_permissions: [create]
- uri: /v1.0/process-models/misc:category_number_one:process-model-with-form/process-instances
- core1-admin-instances:
- groups: ["core-contributor", "Finance Team"]
+ uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
+
+ manage-revenue-streams-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+ manage-procurement-invoice-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
+ manage-procurement-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
+
+ create-test-instances:
+ groups: ["test"]
users: []
allowed_permissions: [create, read]
- uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form:*
- core1-admin-instances-slash:
- groups: ["core-contributor", "Finance Team"]
- users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
+ uri: /process-instances/misc:test:*
diff --git a/src/spiffworkflow_backend/config/permissions/example.yml b/src/spiffworkflow_backend/config/permissions/example.yml
index 79bfed81d..248a400b4 100644
--- a/src/spiffworkflow_backend/config/permissions/example.yml
+++ b/src/spiffworkflow_backend/config/permissions/example.yml
@@ -2,14 +2,17 @@ default_group: everybody
users:
admin:
+ service: local_open_id
email: admin@spiffworkflow.org
password: admin
preferred_username: Admin
nelson:
+ service: local_open_id
email: nelson@spiffworkflow.org
password: nelson
preferred_username: Nelson
malala:
+ service: local_open_id
email: malala@spiffworkflow.org
password: malala
preferred_username: Malala
@@ -18,17 +21,17 @@ groups:
admin:
users:
[
- admin,
+ admin@spiffworkflow.org,
]
Education:
users:
[
- malala
+ malala@spiffworkflow.org
]
President:
users:
[
- nelson
+ nelson@spiffworkflow.org
]
permissions:
@@ -44,45 +47,44 @@ permissions:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/tasks/*
+ uri: /tasks/*
# Everyone can see everything (all groups, and processes are visible)
read-all-process-groups:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
- uri: /v1.0/process-groups/*
+ uri: /process-groups/*
read-all-process-models:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
- uri: /v1.0/process-models/*
+ uri: /process-models/*
read-all-process-instance:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
- uri: /v1.0/process-instances/*
+ uri: /process-instances/*
read-process-instance-reports:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
- uri: /v1.0/process-instances/reports/*
+ uri: /process-instances/reports/*
processes-read:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
- uri: /v1.0/processes
-
- # Members of the Education group can change they processes work.
+ uri: /processes
+ # Members of the Education group can change the processes under "education".
education-admin:
groups: ["Education", "President"]
users: []
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/education:*
+ uri: /process-groups/education:*
# Anyone can start an education process.
education-everybody:
groups: [everybody]
users: []
allowed_permissions: [create, read]
- uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
+ uri: /process-instances/misc:category_number_one:process-model-with-form/*
diff --git a/src/spiffworkflow_backend/config/permissions/qa1.yml b/src/spiffworkflow_backend/config/permissions/qa1.yml
new file mode 100644
index 000000000..049c991ed
--- /dev/null
+++ b/src/spiffworkflow_backend/config/permissions/qa1.yml
@@ -0,0 +1,12 @@
+default_group: everybody
+
+groups:
+ admin:
+ users: [admin@spiffworkflow.org]
+
+permissions:
+ admin:
+ groups: [admin]
+ users: []
+ allowed_permissions: [create, read, update, delete]
+ uri: /*
diff --git a/src/spiffworkflow_backend/config/permissions/staging.yml b/src/spiffworkflow_backend/config/permissions/staging.yml
index 982b945c6..9816ca939 100644
--- a/src/spiffworkflow_backend/config/permissions/staging.yml
+++ b/src/spiffworkflow_backend/config/permissions/staging.yml
@@ -4,57 +4,53 @@ groups:
admin:
users:
[
- admin,
- jakub,
- kb,
- alex,
- dan,
- mike,
- jason,
- j,
- jarrad,
- elizabeth,
- jon,
- natalia,
+ admin@spiffworkflow.org,
+ jakub@status.im,
+ jarrad@status.im,
+ kb@sartography.com,
+ alex@sartography.com,
+ dan@sartography.com,
+ mike@sartography.com,
+ jason@sartography.com,
+ j@sartography.com,
+ elizabeth@sartography.com,
+ jon@sartography.com,
]
Finance Team:
users:
[
- jakub,
- alex,
- dan,
- mike,
- jason,
- j,
- amir,
- jarrad,
- elizabeth,
- jon,
- natalia,
- sasha,
- fin,
- fin1,
+ jakub@status.im,
+ amir@status.im,
+ jarrad@status.im,
+ sasha@status.im,
+ fin@status.im,
+ fin1@status.im,
+ alex@sartography.com,
+ dan@sartography.com,
+ mike@sartography.com,
+ jason@sartography.com,
+ j@sartography.com,
+ elizabeth@sartography.com,
+ jon@sartography.com,
]
demo:
users:
[
- core,
- fin,
- fin1,
- harmeet,
- sasha,
- manuchehr,
- lead,
- lead1
+ harmeet@status.im,
+ sasha@status.im,
+ manuchehr@status.im,
+ core@status.im,
+ fin@status.im,
+ fin1@status.im,
+ lead@status.im,
+ lead1@status.im,
]
-
- core-contributor:
+ test:
users:
[
- core,
- harmeet,
+ natalia@sartography.com,
]
permissions:
@@ -67,99 +63,86 @@ permissions:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/*
+ uri: /process-instances/*
- tasks-crud:
- groups: [everybody]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/tasks/*
-
- service-tasks:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/service-tasks
-
-
- # read all for everybody
+ # open system defaults for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/process-groups/*
+ uri: /process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/process-models/*
- read-all-process-instance:
+ uri: /process-models/*
+
+ # basic perms for everybody
+ read-all-process-instances-for-me:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/process-instances/*
+ uri: /process-instances/for-me/*
read-process-instance-reports:
groups: [everybody]
users: []
- allowed_permissions: [read]
- uri: /v1.0/process-instances/reports/*
+ allowed_permissions: [create, read, update, delete]
+ uri: /process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/processes
-
-
- manage-procurement-admin-instances:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/manage-procurement:*
- manage-procurement-admin-instances-slash:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/manage-procurement/*
- manage-procurement-admin-instance-logs:
- groups: ["Project Lead"]
+ uri: /processes
+ service-tasks:
+ groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/logs/manage-procurement:*
- manage-procurement-admin-instance-logs-slash:
- groups: ["Project Lead"]
+ uri: /service-tasks
+ tasks-crud:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [create, read, update, delete]
+ uri: /tasks/*
+ user-groups-for-current-user:
+ groups: [everybody]
users: []
allowed_permissions: [read]
- uri: /v1.0/logs/manage-procurement/*
+ uri: /user-groups/for-current-user
manage-revenue-streams-instances:
- groups: ["core-contributor", "demo"]
+ groups: ["demo"]
users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
- manage-revenue-streams-instance-logs:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/logs/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
-
+ allowed_permissions: [create]
+ uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
- groups: ["core-contributor", "demo"]
+ groups: ["demo"]
users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
- manage-procurement-invoice-instance-logs:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/logs/manage-procurement:procurement:core-contributor-invoice-management:*
-
+ allowed_permissions: [create]
+ uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
- groups: ["core-contributor", "demo"]
+ groups: ["demo"]
users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
- manage-procurement-instance-logs:
- groups: ["core-contributor", "demo"]
+ allowed_permissions: [create]
+ uri: /process-instances/manage-procurement:vendor-lifecycle-management:*
+
+ manage-revenue-streams-instances-for-me:
+ groups: ["demo"]
users: []
allowed_permissions: [read]
- uri: /v1.0/logs/manage-procurement:vendor-lifecycle-management:*
+ uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
+ manage-procurement-invoice-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
+ manage-procurement-instances-for-me:
+ groups: ["demo"]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*
+
+ create-test-instances:
+ groups: ["test"]
+ users: []
+ allowed_permissions: [create, read]
+ uri: /process-instances/misc:test:*
diff --git a/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml b/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml
index 731de9ab0..049c991ed 100644
--- a/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml
+++ b/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml
@@ -2,60 +2,7 @@ default_group: everybody
groups:
admin:
- users:
- [
- admin,
- jakub,
- kb,
- alex,
- dan,
- mike,
- jason,
- j,
- jarrad,
- elizabeth,
- jon,
- natalia,
- ]
-
- Finance Team:
- users:
- [
- jakub,
- alex,
- dan,
- mike,
- jason,
- j,
- amir,
- jarrad,
- elizabeth,
- jon,
- natalia,
- sasha,
- fin,
- fin1,
- ]
-
- demo:
- users:
- [
- core,
- fin,
- fin1,
- harmeet,
- sasha,
- manuchehr,
- lead,
- lead1
- ]
-
- core-contributor:
- users:
- [
- core,
- harmeet,
- ]
+ users: [admin@spiffworkflow.org]
permissions:
admin:
@@ -63,105 +10,3 @@ permissions:
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
-
- tasks-crud:
- groups: [everybody]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/tasks/*
-
- service-tasks:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/service-tasks
-
-
- # read all for everybody
- read-all-process-groups:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/process-groups/*
- read-all-process-models:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/process-models/*
- read-all-process-instance:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/process-instances/*
- read-process-instance-reports:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/process-instances/reports/*
- processes-read:
- groups: [everybody]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/processes
-
- task-data-read:
- groups: [demo]
- users: []
- allowed_permissions: [read]
- uri: /v1.0/task-data/*
-
-
- manage-procurement-admin:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/manage-procurement:*
- manage-procurement-admin-slash:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/manage-procurement/*
- manage-procurement-admin-models:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-models/manage-procurement:*
- manage-procurement-admin-models-slash:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-models/manage-procurement/*
- manage-procurement-admin-instances:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/manage-procurement:*
- manage-procurement-admin-instances-slash:
- groups: ["Project Lead"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/manage-procurement/*
-
- finance-admin:
- groups: ["Finance Team"]
- users: []
- allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/manage-procurement:procurement:*
-
- manage-revenue-streams-instances:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
-
- manage-procurement-invoice-instances:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
-
- manage-procurement-instances:
- groups: ["core-contributor", "demo"]
- users: []
- allowed_permissions: [create, read]
- uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
diff --git a/src/spiffworkflow_backend/config/permissions/testing.yml b/src/spiffworkflow_backend/config/permissions/testing.yml
index c678205df..d3edf0a8a 100644
--- a/src/spiffworkflow_backend/config/permissions/testing.yml
+++ b/src/spiffworkflow_backend/config/permissions/testing.yml
@@ -1,5 +1,12 @@
default_group: everybody
+users:
+ testadmin1:
+ service: https://testing/openid/thing
+ email: testadmin1@spiffworkflow.org
+ password: admin
+ preferred_username: El administrador de la muerte
+
groups:
admin:
users: [testadmin1, testadmin2]
@@ -14,7 +21,7 @@ permissions:
admin:
groups: [admin]
users: []
- allowed_permissions: [create, read, update, delete, list, instantiate]
+ allowed_permissions: [create, read, update, delete]
uri: /*
read-all:
@@ -23,33 +30,39 @@ permissions:
allowed_permissions: [read]
uri: /*
+ process-instances-find-by-id:
+ groups: [everybody]
+ users: []
+ allowed_permissions: [read]
+ uri: /process-instances/find-by-id/*
+
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/tasks/*
+ uri: /tasks/*
# TODO: all uris should really have the same structure
finance-admin-group:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-groups/finance/*
+ uri: /process-groups/finance/*
finance-admin-model:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-models/finance/*
+ uri: /process-models/finance/*
finance-admin-model-lanes:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-models/finance:model_with_lanes/*
+ uri: /process-models/finance:model_with_lanes/*
finance-admin-instance-run:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
- uri: /v1.0/process-instances/*
+ uri: /process-instances/*
diff --git a/src/spiffworkflow_backend/config/qa1.py b/src/spiffworkflow_backend/config/qa1.py
new file mode 100644
index 000000000..2f8ad5fca
--- /dev/null
+++ b/src/spiffworkflow_backend/config/qa1.py
@@ -0,0 +1,11 @@
+"""Qa1."""
+from os import environ
+
+GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="qa2")
+GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
+GIT_USER_EMAIL = environ.get(
+ "GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
+)
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
+ "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
+)
diff --git a/src/spiffworkflow_backend/config/staging.py b/src/spiffworkflow_backend/config/staging.py
index 9cc247056..807163315 100644
--- a/src/spiffworkflow_backend/config/staging.py
+++ b/src/spiffworkflow_backend/config/staging.py
@@ -1,7 +1,7 @@
"""Staging."""
from os import environ
-GIT_BRANCH = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
+GIT_BRANCH = environ.get("GIT_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"
diff --git a/src/spiffworkflow_backend/helpers/api_version.py b/src/spiffworkflow_backend/helpers/api_version.py
new file mode 100644
index 000000000..607b6c16b
--- /dev/null
+++ b/src/spiffworkflow_backend/helpers/api_version.py
@@ -0,0 +1,2 @@
+"""Api_version."""
+V1_API_PATH_PREFIX = "/v1.0"
diff --git a/src/spiffworkflow_backend/interfaces.py b/src/spiffworkflow_backend/interfaces.py
new file mode 100644
index 000000000..3d5280420
--- /dev/null
+++ b/src/spiffworkflow_backend/interfaces.py
@@ -0,0 +1,24 @@
+"""Interfaces."""
+from typing import NewType
+from typing import TYPE_CHECKING
+from typing import TypedDict
+
+if TYPE_CHECKING:
+ from spiffworkflow_backend.models.process_group import ProcessGroup
+
+
+IdToProcessGroupMapping = NewType("IdToProcessGroupMapping", dict[str, "ProcessGroup"])
+
+
+class ProcessGroupLite(TypedDict):
+ """ProcessGroupLite."""
+
+ id: str
+ display_name: str
+
+
+class ProcessGroupLitesWithCache(TypedDict):
+ """ProcessGroupLitesWithCache."""
+
+ cache: dict[str, "ProcessGroup"]
+ process_groups: list[ProcessGroupLite]
diff --git a/src/spiffworkflow_backend/load_database_models.py b/src/spiffworkflow_backend/load_database_models.py
index 71adb57c6..bc79a8e39 100644
--- a/src/spiffworkflow_backend/load_database_models.py
+++ b/src/spiffworkflow_backend/load_database_models.py
@@ -17,7 +17,7 @@ from spiffworkflow_backend.models.user_group_assignment import (
from spiffworkflow_backend.models.principal import PrincipalModel # noqa: F401
-from spiffworkflow_backend.models.active_task import ActiveTaskModel # noqa: F401
+from spiffworkflow_backend.models.human_task import HumanTaskModel # noqa: F401
from spiffworkflow_backend.models.spec_reference import (
SpecReferenceCache,
) # noqa: F401
diff --git a/src/spiffworkflow_backend/models/group.py b/src/spiffworkflow_backend/models/group.py
index 3b7edd6ce..980fc9302 100644
--- a/src/spiffworkflow_backend/models/group.py
+++ b/src/spiffworkflow_backend/models/group.py
@@ -27,6 +27,9 @@ class GroupModel(FlaskBpmnGroupModel):
identifier = db.Column(db.String(255))
user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
+ user_group_assignments_waiting = relationship( # type: ignore
+ "UserGroupAssignmentWaitingModel", cascade="delete"
+ )
users = relationship( # type: ignore
"UserModel",
viewonly=True,
diff --git a/src/spiffworkflow_backend/models/active_task.py b/src/spiffworkflow_backend/models/human_task.py
similarity index 71%
rename from src/spiffworkflow_backend/models/active_task.py
rename to src/spiffworkflow_backend/models/human_task.py
index ea9e10552..f74da5cca 100644
--- a/src/spiffworkflow_backend/models/active_task.py
+++ b/src/spiffworkflow_backend/models/human_task.py
@@ -1,4 +1,4 @@
-"""Active_task."""
+"""Human_task."""
from __future__ import annotations
from dataclasses import dataclass
@@ -8,7 +8,6 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
-from sqlalchemy.orm import RelationshipProperty
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -17,29 +16,30 @@ from spiffworkflow_backend.models.user import UserModel
if TYPE_CHECKING:
- from spiffworkflow_backend.models.active_task_user import ( # noqa: F401
- ActiveTaskUserModel,
+ from spiffworkflow_backend.models.human_task_user import ( # noqa: F401
+ HumanTaskUserModel,
)
@dataclass
-class ActiveTaskModel(SpiffworkflowBaseDBModel):
- """ActiveTaskModel."""
+class HumanTaskModel(SpiffworkflowBaseDBModel):
+ """HumanTaskModel."""
- __tablename__ = "active_task"
+ __tablename__ = "human_task"
__table_args__ = (
- db.UniqueConstraint(
- "task_id", "process_instance_id", name="active_task_unique"
- ),
+ db.UniqueConstraint("task_id", "process_instance_id", name="human_task_unique"),
)
- actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
- actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id))
+ completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) # type: ignore
+
+ actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) # type: ignore
+ # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
+
form_file_name: str | None = db.Column(db.String(50))
ui_form_file_name: str | None = db.Column(db.String(50))
@@ -52,17 +52,18 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
task_type: str = db.Column(db.String(50))
task_status: str = db.Column(db.String(50))
process_model_display_name: str = db.Column(db.String(255))
+ completed: bool = db.Column(db.Boolean, default=False, nullable=False, index=True)
- active_task_users = relationship("ActiveTaskUserModel", cascade="delete")
+ human_task_users = relationship("HumanTaskUserModel", cascade="delete")
potential_owners = relationship( # type: ignore
"UserModel",
viewonly=True,
- secondary="active_task_user",
- overlaps="active_task_user,users",
+ secondary="human_task_user",
+ overlaps="human_task_user,users",
)
@classmethod
- def to_task(cls, task: ActiveTaskModel) -> Task:
+ def to_task(cls, task: HumanTaskModel) -> Task:
"""To_task."""
new_task = Task(
task.task_id,
@@ -79,7 +80,7 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
if hasattr(task, "process_model_identifier"):
new_task.process_model_identifier = task.process_model_identifier
- # active tasks only have status when getting the list on the home page
+ # human tasks only have status when getting the list on the home page
# and it comes from the process_instance. it should not be confused with task_status.
if hasattr(task, "status"):
new_task.process_instance_status = task.status
diff --git a/src/spiffworkflow_backend/models/active_task_user.py b/src/spiffworkflow_backend/models/human_task_user.py
similarity index 53%
rename from src/spiffworkflow_backend/models/active_task_user.py
rename to src/spiffworkflow_backend/models/human_task_user.py
index f194c38e4..31823af82 100644
--- a/src/spiffworkflow_backend/models/active_task_user.py
+++ b/src/spiffworkflow_backend/models/human_task_user.py
@@ -1,4 +1,4 @@
-"""Active_task_user."""
+"""Human_task_user."""
from __future__ import annotations
from dataclasses import dataclass
@@ -7,26 +7,26 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
-from spiffworkflow_backend.models.active_task import ActiveTaskModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.user import UserModel
@dataclass
-class ActiveTaskUserModel(SpiffworkflowBaseDBModel):
- """ActiveTaskUserModel."""
+class HumanTaskUserModel(SpiffworkflowBaseDBModel):
+ """HumanTaskUserModel."""
- __tablename__ = "active_task_user"
+ __tablename__ = "human_task_user"
__table_args__ = (
db.UniqueConstraint(
- "active_task_id",
+ "human_task_id",
"user_id",
- name="active_task_user_unique",
+ name="human_task_user_unique",
),
)
id = db.Column(db.Integer, primary_key=True)
- active_task_id = db.Column(
- ForeignKey(ActiveTaskModel.id), nullable=False, index=True # type: ignore
+ human_task_id = db.Column(
+ ForeignKey(HumanTaskModel.id), nullable=False, index=True # type: ignore
)
- user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
+ user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore
diff --git a/src/spiffworkflow_backend/models/message_instance.py b/src/spiffworkflow_backend/models/message_instance.py
index 2559a6352..b0cc2aa34 100644
--- a/src/spiffworkflow_backend/models/message_instance.py
+++ b/src/spiffworkflow_backend/models/message_instance.py
@@ -86,5 +86,6 @@ def ensure_failure_cause_is_set_if_message_instance_failed(
if isinstance(instance, MessageInstanceModel):
if instance.status == "failed" and instance.failure_cause is None:
raise ValueError(
- f"{instance.__class__.__name__}: failure_cause must be set if status is failed"
+ f"{instance.__class__.__name__}: failure_cause must be set if"
+ " status is failed"
)
diff --git a/src/spiffworkflow_backend/models/permission_assignment.py b/src/spiffworkflow_backend/models/permission_assignment.py
index 63295f74e..04dfb5fac 100644
--- a/src/spiffworkflow_backend/models/permission_assignment.py
+++ b/src/spiffworkflow_backend/models/permission_assignment.py
@@ -32,14 +32,6 @@ class Permission(enum.Enum):
update = "update"
delete = "delete"
- # maybe read to GET process_model/process-instances instead?
- list = "list"
-
- # maybe use create instead on
- # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/*
- # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/332/run
- instantiate = "instantiate" # this is something you do to a process model
-
class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
"""PermissionAssignmentModel."""
diff --git a/src/spiffworkflow_backend/models/principal.py b/src/spiffworkflow_backend/models/principal.py
index c7efa8609..ac8ee6a4e 100644
--- a/src/spiffworkflow_backend/models/principal.py
+++ b/src/spiffworkflow_backend/models/principal.py
@@ -27,7 +27,7 @@ class PrincipalModel(SpiffworkflowBaseDBModel):
__table_args__ = (CheckConstraint("NOT(user_id IS NULL AND group_id IS NULL)"),)
id = db.Column(db.Integer, primary_key=True)
- user_id = db.Column(ForeignKey(UserModel.id), nullable=True, unique=True)
+ user_id = db.Column(ForeignKey(UserModel.id), nullable=True, unique=True) # type: ignore
group_id = db.Column(ForeignKey(GroupModel.id), nullable=True, unique=True)
user = relationship("UserModel", viewonly=True)
diff --git a/src/spiffworkflow_backend/models/process_group.py b/src/spiffworkflow_backend/models/process_group.py
index 1439b0459..63c851a5b 100644
--- a/src/spiffworkflow_backend/models/process_group.py
+++ b/src/spiffworkflow_backend/models/process_group.py
@@ -11,6 +11,7 @@ import marshmallow
from marshmallow import post_load
from marshmallow import Schema
+from spiffworkflow_backend.interfaces import ProcessGroupLite
from spiffworkflow_backend.models.process_model import ProcessModelInfo
@@ -29,7 +30,7 @@ class ProcessGroup:
default_factory=list[ProcessModelInfo]
)
process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"])
- parent_groups: list[dict] | None = None
+ parent_groups: list[ProcessGroupLite] | None = None
def __post_init__(self) -> None:
"""__post_init__."""
diff --git a/src/spiffworkflow_backend/models/process_instance.py b/src/spiffworkflow_backend/models/process_instance.py
index c89f457b0..31912c306 100644
--- a/src/spiffworkflow_backend/models/process_instance.py
+++ b/src/spiffworkflow_backend/models/process_instance.py
@@ -26,34 +26,12 @@ class ProcessInstanceNotFoundError(Exception):
"""ProcessInstanceNotFoundError."""
-class NavigationItemSchema(Schema):
- """NavigationItemSchema."""
+class ProcessInstanceTaskDataCannotBeUpdatedError(Exception):
+ """ProcessInstanceTaskDataCannotBeUpdatedError."""
- class Meta:
- """Meta."""
- fields = [
- "spec_id",
- "name",
- "spec_type",
- "task_id",
- "description",
- "backtracks",
- "indent",
- "lane",
- "state",
- "children",
- ]
- unknown = INCLUDE
-
- state = marshmallow.fields.String(required=False, allow_none=True)
- description = marshmallow.fields.String(required=False, allow_none=True)
- backtracks = marshmallow.fields.String(required=False, allow_none=True)
- lane = marshmallow.fields.String(required=False, allow_none=True)
- task_id = marshmallow.fields.String(required=False, allow_none=True)
- children = marshmallow.fields.List(
- marshmallow.fields.Nested(lambda: NavigationItemSchema())
- )
+class ProcessInstanceCannotBeDeletedError(Exception):
+ """ProcessInstanceCannotBeDeletedError."""
class ProcessInstanceStatus(SpiffEnum):
@@ -79,10 +57,22 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
process_model_display_name: str = db.Column(
db.String(255), nullable=False, index=True
)
- process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
+ process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore
process_initiator = relationship("UserModel")
- active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore
+ active_human_tasks = relationship(
+ "HumanTaskModel",
+ primaryjoin=(
+ "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id,"
+ " HumanTaskModel.completed == False)"
+ ),
+ ) # type: ignore
+
+ human_tasks = relationship(
+ "HumanTaskModel",
+ cascade="delete",
+ overlaps="active_human_tasks",
+ ) # type: ignore
message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore
@@ -131,6 +121,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"""Validate_status."""
return self.validate_enum_field(key, value, ProcessInstanceStatus)
+ def can_submit_task(self) -> bool:
+ """Can_submit_task."""
+ return not self.has_terminal_status() and self.status != "suspended"
+
+ def has_terminal_status(self) -> bool:
+ """Has_terminal_status."""
+ return self.status in self.terminal_statuses()
+
+ @classmethod
+ def terminal_statuses(cls) -> list[str]:
+ """Terminal_statuses."""
+ return ["complete", "error", "terminated"]
+
class ProcessInstanceModelSchema(Schema):
"""ProcessInstanceModelSchema."""
diff --git a/src/spiffworkflow_backend/models/process_instance_report.py b/src/spiffworkflow_backend/models/process_instance_report.py
index 1f22a3830..b1288b3f1 100644
--- a/src/spiffworkflow_backend/models/process_instance_report.py
+++ b/src/spiffworkflow_backend/models/process_instance_report.py
@@ -70,7 +70,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
id: int = db.Column(db.Integer, primary_key=True)
identifier: str = db.Column(db.String(50), nullable=False, index=True)
report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore
- created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
+ created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore
created_by = relationship("UserModel")
created_at_in_seconds = db.Column(db.Integer)
updated_at_in_seconds = db.Column(db.Integer)
diff --git a/src/spiffworkflow_backend/models/process_model.py b/src/spiffworkflow_backend/models/process_model.py
index e8d5eed1c..5e0ba6ca0 100644
--- a/src/spiffworkflow_backend/models/process_model.py
+++ b/src/spiffworkflow_backend/models/process_model.py
@@ -11,6 +11,7 @@ import marshmallow
from marshmallow import Schema
from marshmallow.decorators import post_load
+from spiffworkflow_backend.interfaces import ProcessGroupLite
from spiffworkflow_backend.models.file import File
@@ -37,7 +38,7 @@ class ProcessModelInfo:
files: list[File] | None = field(default_factory=list[File])
fault_or_suspend_on_exception: str = NotificationType.fault.value
exception_notification_addresses: list[str] = field(default_factory=list)
- parent_groups: list[dict] | None = None
+ parent_groups: list[ProcessGroupLite] | None = None
metadata_extraction_paths: list[dict[str, str]] | None = None
def __post_init__(self) -> None:
@@ -57,6 +58,14 @@ class ProcessModelInfo:
"""Id_for_file_path."""
return self.id.replace("/", os.sep)
+ @classmethod
+ def modify_process_identifier_for_path_param(cls, identifier: str) -> str:
+        """Modify_process_identifier_for_path_param."""
+ if "\\" in identifier:
+ raise Exception(f"Found backslash in identifier: {identifier}")
+
+ return identifier.replace("/", ":")
+
class ProcessModelInfoSchema(Schema):
"""ProcessModelInfoSchema."""
diff --git a/src/spiffworkflow_backend/models/secret_model.py b/src/spiffworkflow_backend/models/secret_model.py
index 92fd470a3..91a4f23bb 100644
--- a/src/spiffworkflow_backend/models/secret_model.py
+++ b/src/spiffworkflow_backend/models/secret_model.py
@@ -17,7 +17,7 @@ class SecretModel(SpiffworkflowBaseDBModel):
id: int = db.Column(db.Integer, primary_key=True)
key: str = db.Column(db.String(50), unique=True, nullable=False)
value: str = db.Column(db.Text(), nullable=False)
- user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
+ user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
diff --git a/src/spiffworkflow_backend/models/spiff_step_details.py b/src/spiffworkflow_backend/models/spiff_step_details.py
index 9afb5d078..11c3aeada 100644
--- a/src/spiffworkflow_backend/models/spiff_step_details.py
+++ b/src/spiffworkflow_backend/models/spiff_step_details.py
@@ -1,13 +1,11 @@
"""Spiff_step_details."""
from dataclasses import dataclass
-from typing import Optional
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import deferred
-from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -20,10 +18,13 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
+ # human_task_id: int = db.Column(
+ # ForeignKey(HumanTaskModel.id) # type: ignore
+ # )
spiff_step: int = db.Column(db.Integer, nullable=False)
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
- completed_by_user_id: int = db.Column(db.Integer, nullable=True)
- lane_assignment_id: Optional[int] = db.Column(
- ForeignKey(GroupModel.id), nullable=True
- )
+ # completed_by_user_id: int = db.Column(db.Integer, nullable=True)
+ # lane_assignment_id: Optional[int] = db.Column(
+ # ForeignKey(GroupModel.id), nullable=True
+ # )
diff --git a/src/spiffworkflow_backend/models/task.py b/src/spiffworkflow_backend/models/task.py
index 60deda842..5c924196a 100644
--- a/src/spiffworkflow_backend/models/task.py
+++ b/src/spiffworkflow_backend/models/task.py
@@ -43,8 +43,8 @@ class Task:
FIELD_TYPE_EMAIL = "email" # email: Email address
FIELD_TYPE_URL = "url" # url: Website address
- FIELD_PROP_AUTO_COMPLETE_MAX = (
- "autocomplete_num" # Not used directly, passed in from the front end.
+ FIELD_PROP_AUTO_COMPLETE_MAX = ( # Not used directly, passed in from the front end.
+ "autocomplete_num"
)
# Required field
@@ -77,8 +77,8 @@ class Task:
# File specific field properties
FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code
- FIELD_PROP_FILE_DATA = (
- "file_data" # to associate a bit of data with a specific file upload file.
+ FIELD_PROP_FILE_DATA = ( # to associate a bit of data with a specific file upload file.
+ "file_data"
)
# Additional properties
@@ -118,7 +118,9 @@ class Task:
form_schema: Union[str, None] = None,
form_ui_schema: Union[str, None] = None,
parent: Optional[str] = None,
+ event_definition: Union[dict[str, Any], None] = None,
call_activity_process_identifier: Optional[str] = None,
+ calling_subprocess_task_id: Optional[str] = None,
):
"""__init__."""
self.id = id
@@ -130,7 +132,9 @@ class Task:
self.documentation = documentation
self.lane = lane
self.parent = parent
+ self.event_definition = event_definition
self.call_activity_process_identifier = call_activity_process_identifier
+ self.calling_subprocess_task_id = calling_subprocess_task_id
self.data = data
if self.data is None:
@@ -189,7 +193,9 @@ class Task:
"form_schema": self.form_schema,
"form_ui_schema": self.form_ui_schema,
"parent": self.parent,
+ "event_definition": self.event_definition,
"call_activity_process_identifier": self.call_activity_process_identifier,
+ "calling_subprocess_task_id": self.calling_subprocess_task_id,
}
@classmethod
@@ -290,6 +296,7 @@ class TaskSchema(Schema):
"process_instance_id",
"form_schema",
"form_ui_schema",
+ "event_definition",
]
multi_instance_type = EnumField(MultiInstanceType)
diff --git a/src/spiffworkflow_backend/models/user.py b/src/spiffworkflow_backend/models/user.py
index b8c83d0f7..c4838aafa 100644
--- a/src/spiffworkflow_backend/models/user.py
+++ b/src/spiffworkflow_backend/models/user.py
@@ -1,42 +1,41 @@
"""User."""
from __future__ import annotations
-from typing import Any
+from dataclasses import dataclass
import jwt
import marshmallow
from flask import current_app
-from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import Schema
from sqlalchemy.orm import relationship
-from sqlalchemy.orm import validates
from spiffworkflow_backend.models.group import GroupModel
-from spiffworkflow_backend.services.authentication_service import (
- AuthenticationProviderTypes,
-)
class UserNotFoundError(Exception):
"""UserNotFoundError."""
+@dataclass
class UserModel(SpiffworkflowBaseDBModel):
"""UserModel."""
__tablename__ = "user"
__table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)
- id = db.Column(db.Integer, primary_key=True)
- # server and service id must be unique, not username.
- username = db.Column(db.String(255), nullable=False, unique=False)
- uid = db.Column(db.String(50), unique=True)
- service = db.Column(db.String(50), nullable=False, unique=False)
+ id: int = db.Column(db.Integer, primary_key=True)
+ username: str = db.Column(db.String(255), nullable=False, unique=True)
+
+ service = db.Column(
+ db.String(255), nullable=False, unique=False
+ ) # not 'openid' -- google, aws
service_id = db.Column(db.String(255), nullable=False, unique=False)
- name = db.Column(db.String(255))
+ display_name = db.Column(db.String(255))
email = db.Column(db.String(255))
+ updated_at_in_seconds: int = db.Column(db.Integer)
+ created_at_in_seconds: int = db.Column(db.Integer)
user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete") # type: ignore
groups = relationship( # type: ignore
@@ -47,21 +46,6 @@ class UserModel(SpiffworkflowBaseDBModel):
)
principal = relationship("PrincipalModel", uselist=False) # type: ignore
- @validates("service")
- def validate_service(self, key: str, value: Any) -> str:
- """Validate_service."""
- try:
- ap_type = getattr(AuthenticationProviderTypes, value, None)
- except Exception as e:
- raise ValueError(f"invalid service type: {value}") from e
- if ap_type is not None:
- ap_value: str = ap_type.value
- return ap_value
- raise ApiError(
- error_code="invalid_service",
- message=f"Could not validate service with value: {value}",
- )
-
def encode_auth_token(self) -> str:
"""Generate the Auth Token.
diff --git a/src/spiffworkflow_backend/models/user_group_assignment.py b/src/spiffworkflow_backend/models/user_group_assignment.py
index fa5b620c8..9c1567fb7 100644
--- a/src/spiffworkflow_backend/models/user_group_assignment.py
+++ b/src/spiffworkflow_backend/models/user_group_assignment.py
@@ -17,7 +17,7 @@ class UserGroupAssignmentModel(SpiffworkflowBaseDBModel):
)
id = db.Column(db.Integer, primary_key=True)
- user_id = db.Column(ForeignKey(UserModel.id), nullable=False)
+ user_id = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore
group_id = db.Column(ForeignKey(GroupModel.id), nullable=False)
group = relationship("GroupModel", overlaps="groups,user_group_assignments,users") # type: ignore
diff --git a/src/spiffworkflow_backend/models/user_group_assignment_waiting.py b/src/spiffworkflow_backend/models/user_group_assignment_waiting.py
new file mode 100644
index 000000000..ac2747c85
--- /dev/null
+++ b/src/spiffworkflow_backend/models/user_group_assignment_waiting.py
@@ -0,0 +1,34 @@
+"""UserGroupAssignmentWaiting."""
+from flask_bpmn.models.db import db
+from flask_bpmn.models.db import SpiffworkflowBaseDBModel
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import relationship
+
+from spiffworkflow_backend.models.group import GroupModel
+
+
+class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel):
+ """When a user is assigned to a group, but that username does not exist.
+
+ We cache it here to be applied in the event the user does log in to the system.
+ """
+
+ MATCH_ALL_USERS = "*"
+ __tablename__ = "user_group_assignment_waiting"
+ __table_args__ = (
+ db.UniqueConstraint(
+ "username", "group_id", name="user_group_assignment_staged_unique"
+ ),
+ )
+
+ id = db.Column(db.Integer, primary_key=True)
+ username = db.Column(db.String(255), nullable=False)
+ group_id = db.Column(ForeignKey(GroupModel.id), nullable=False)
+
+ group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore
+
+ def is_match_all(self) -> bool:
+ """Is_match_all."""
+ if self.username == self.MATCH_ALL_USERS:
+ return True
+ return False
diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py b/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py
index f1223ae0d..5cb0ae89b 100644
--- a/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py
+++ b/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py
@@ -141,7 +141,7 @@ def process_model_save(process_model_id: str, file_name: str) -> Union[str, Resp
@admin_blueprint.route("/process-models//run", methods=["GET"])
def process_model_run(process_model_id: str) -> Union[str, Response]:
"""Process_model_run."""
- user = UserService.create_user("internal", "Mr. Test", username="Mr. Test")
+ user = UserService.create_user("Mr. Test", "internal", "Mr. Test")
process_instance = (
ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model_id, user
diff --git a/src/spiffworkflow_backend/routes/health_controller.py b/src/spiffworkflow_backend/routes/health_controller.py
new file mode 100644
index 000000000..e98311101
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/health_controller.py
@@ -0,0 +1,13 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+
+import flask.wrappers
+from flask.wrappers import Response
+
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+
+
+def status() -> flask.wrappers.Response:
+ """Status."""
+ ProcessInstanceModel.query.filter().first()
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
diff --git a/src/spiffworkflow_backend/routes/messages_controller.py b/src/spiffworkflow_backend/routes/messages_controller.py
new file mode 100644
index 000000000..51290770f
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/messages_controller.py
@@ -0,0 +1,176 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+from typing import Any
+from typing import Dict
+from typing import Optional
+
+import flask.wrappers
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+
+from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
+from spiffworkflow_backend.models.message_instance import MessageInstanceModel
+from spiffworkflow_backend.models.message_model import MessageModel
+from spiffworkflow_backend.models.message_triggerable_process_model import (
+ MessageTriggerableProcessModel,
+)
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _find_process_instance_by_id_or_raise,
+)
+from spiffworkflow_backend.services.message_service import MessageService
+
+
+def message_instance_list(
+ process_instance_id: Optional[int] = None,
+ page: int = 1,
+ per_page: int = 100,
+) -> flask.wrappers.Response:
+ """Message_instance_list."""
+    # start from all message instances; optionally narrow to one process instance below
+ message_instances_query = MessageInstanceModel.query
+
+ if process_instance_id:
+ message_instances_query = message_instances_query.filter_by(
+ process_instance_id=process_instance_id
+ )
+
+ message_instances = (
+ message_instances_query.order_by(
+ MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore
+ MessageInstanceModel.id.desc(), # type: ignore
+ )
+ .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id)
+ .join(ProcessInstanceModel)
+ .add_columns(
+ MessageModel.identifier.label("message_identifier"),
+ ProcessInstanceModel.process_model_identifier,
+ ProcessInstanceModel.process_model_display_name,
+ )
+ .paginate(page=page, per_page=per_page, error_out=False)
+ )
+
+ for message_instance in message_instances:
+ message_correlations: dict = {}
+ for (
+ mcmi
+ ) in (
+ message_instance.MessageInstanceModel.message_correlations_message_instances
+ ):
+ mc = MessageCorrelationModel.query.filter_by(
+ id=mcmi.message_correlation_id
+ ).all()
+ for m in mc:
+ if m.name not in message_correlations:
+ message_correlations[m.name] = {}
+ message_correlations[m.name][
+ m.message_correlation_property.identifier
+ ] = m.value
+ message_instance.MessageInstanceModel.message_correlations = (
+ message_correlations
+ )
+
+ response_json = {
+ "results": message_instances.items,
+ "pagination": {
+ "count": len(message_instances.items),
+ "total": message_instances.total,
+ "pages": message_instances.pages,
+ },
+ }
+
+ return make_response(jsonify(response_json), 200)
+
+
+# body: {
+# payload: dict,
+# process_instance_id: Optional[int],
+# }
+def message_start(
+ message_identifier: str,
+ body: Dict[str, Any],
+) -> flask.wrappers.Response:
+ """Message_start."""
+ message_model = MessageModel.query.filter_by(identifier=message_identifier).first()
+ if message_model is None:
+ raise (
+ ApiError(
+ error_code="unknown_message",
+ message=f"Could not find message with identifier: {message_identifier}",
+ status_code=404,
+ )
+ )
+
+ if "payload" not in body:
+ raise (
+ ApiError(
+ error_code="missing_payload",
+ message="Body is missing payload.",
+ status_code=400,
+ )
+ )
+
+ process_instance = None
+ if "process_instance_id" in body:
+ # to make sure we have a valid process_instance_id
+ process_instance = _find_process_instance_by_id_or_raise(
+ body["process_instance_id"]
+ )
+
+ message_instance = MessageInstanceModel.query.filter_by(
+ process_instance_id=process_instance.id,
+ message_model_id=message_model.id,
+ message_type="receive",
+ status="ready",
+ ).first()
+ if message_instance is None:
+ raise (
+ ApiError(
+ error_code="cannot_find_waiting_message",
+ message=(
+ "Could not find waiting message for identifier"
+ f" {message_identifier} and process instance"
+ f" {process_instance.id}"
+ ),
+ status_code=400,
+ )
+ )
+ MessageService.process_message_receive(
+ message_instance, message_model.name, body["payload"]
+ )
+
+ else:
+ message_triggerable_process_model = (
+ MessageTriggerableProcessModel.query.filter_by(
+ message_model_id=message_model.id
+ ).first()
+ )
+
+ if message_triggerable_process_model is None:
+ raise (
+ ApiError(
+ error_code="cannot_start_message",
+ message=(
+                        "Message cannot be started with identifier:"
+ f" {message_identifier}"
+ ),
+ status_code=400,
+ )
+ )
+
+ process_instance = MessageService.process_message_triggerable_process_model(
+ message_triggerable_process_model,
+ message_model.name,
+ body["payload"],
+ g.user,
+ )
+
+ return Response(
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=200,
+ mimetype="application/json",
+ )
diff --git a/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py
index f812ab034..f25100eed 100644
--- a/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py
+++ b/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py
@@ -111,6 +111,7 @@ def token() -> dict:
"iat": time.time(),
"exp": time.time() + 86400, # Expire after a day.
"sub": user_name,
+ "email": user_details["email"],
"preferred_username": user_details.get("preferred_username", user_name),
},
client_secret,
diff --git a/src/spiffworkflow_backend/routes/process_api_blueprint.py b/src/spiffworkflow_backend/routes/process_api_blueprint.py
index 616d07ca6..4a6cc1c42 100644
--- a/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -1,139 +1,54 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
-import os
-import random
-import re
-import string
-import uuid
from typing import Any
from typing import Dict
-from typing import Optional
-from typing import TypedDict
-from typing import Union
-import connexion # type: ignore
import flask.wrappers
-import jinja2
-import werkzeug
from flask import Blueprint
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
-from flask import redirect
from flask import request
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
-from lxml import etree # type: ignore
-from lxml.builder import ElementMaker # type: ignore
-from SpiffWorkflow.task import Task as SpiffTask # type: ignore
-from SpiffWorkflow.task import TaskState
-from sqlalchemy import and_
-from sqlalchemy import asc
-from sqlalchemy import desc
-from sqlalchemy import func
-from sqlalchemy.orm import aliased
-from sqlalchemy.orm import selectinload
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
)
-from spiffworkflow_backend.models.active_task import ActiveTaskModel
-from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
-from spiffworkflow_backend.models.file import FileSchema
-from spiffworkflow_backend.models.group import GroupModel
-from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
-from spiffworkflow_backend.models.message_instance import MessageInstanceModel
-from spiffworkflow_backend.models.message_model import MessageModel
-from spiffworkflow_backend.models.message_triggerable_process_model import (
- MessageTriggerableProcessModel,
-)
from spiffworkflow_backend.models.principal import PrincipalModel
-from spiffworkflow_backend.models.process_group import ProcessGroup
-from spiffworkflow_backend.models.process_group import ProcessGroupSchema
-from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
-from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
-from spiffworkflow_backend.models.process_instance_metadata import (
- ProcessInstanceMetadataModel,
-)
-from spiffworkflow_backend.models.process_instance_report import (
- ProcessInstanceReportModel,
+from spiffworkflow_backend.models.process_instance import (
+ ProcessInstanceTaskDataCannotBeUpdatedError,
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
-from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
-from spiffworkflow_backend.models.secret_model import SecretModel
-from spiffworkflow_backend.models.secret_model import SecretModelSchema
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
-from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
-from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
-from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
-from spiffworkflow_backend.models.user import UserModel
-from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
-from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.services.authorization_service import AuthorizationService
-from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
-from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.git_service import GitService
-from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
-from spiffworkflow_backend.services.process_instance_report_service import (
- ProcessInstanceReportFilter,
-)
-from spiffworkflow_backend.services.process_instance_report_service import (
- ProcessInstanceReportService,
-)
-from spiffworkflow_backend.services.process_instance_service import (
- ProcessInstanceService,
-)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
-from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
-from spiffworkflow_backend.services.secret_service import SecretService
-from spiffworkflow_backend.services.service_task_service import ServiceTaskService
-from spiffworkflow_backend.services.spec_file_service import SpecFileService
-from spiffworkflow_backend.services.user_service import UserService
-
-
-class TaskDataSelectOption(TypedDict):
- """TaskDataSelectOption."""
-
- value: str
- label: str
-
-
-class ReactJsonSchemaSelectOption(TypedDict):
- """ReactJsonSchemaSelectOption."""
-
- type: str
- title: str
- enum: list[str]
process_api_blueprint = Blueprint("process_api", __name__)
-def status() -> flask.wrappers.Response:
- """Status."""
- ProcessInstanceModel.query.filter().first()
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.Response:
"""Permissions_check."""
if "requests_to_check" not in body:
raise (
ApiError(
error_code="could_not_requests_to_check",
- message="The key 'requests_to_check' not found at root of request body.",
+ message=(
+ "The key 'requests_to_check' not found at root of request body."
+ ),
status_code=400,
)
)
-
response_dict: dict[str, dict[str, bool]] = {}
requests_to_check = body["requests_to_check"]
@@ -156,295 +71,6 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
return make_response(jsonify({"results": response_dict}), 200)
-def modify_process_model_id(process_model_id: str) -> str:
- """Modify_process_model_id."""
- return process_model_id.replace("/", ":")
-
-
-def un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str:
- """Un_modify_modified_process_model_id."""
- return modified_process_model_identifier.replace(":", "/")
-
-
-def process_group_add(body: dict) -> flask.wrappers.Response:
- """Add_process_group."""
- process_group = ProcessGroup(**body)
- ProcessModelService.add_process_group(process_group)
- commit_and_push_to_git(
- f"User: {g.user.username} added process group {process_group.id}"
- )
- return make_response(jsonify(process_group), 201)
-
-
-def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
- """Process_group_delete."""
- process_group_id = un_modify_modified_process_model_id(modified_process_group_id)
- ProcessModelService().process_group_delete(process_group_id)
- commit_and_push_to_git(
- f"User: {g.user.username} deleted process group {process_group_id}"
- )
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def process_group_update(
- modified_process_group_id: str, body: dict
-) -> flask.wrappers.Response:
- """Process Group Update."""
- body_include_list = ["display_name", "description"]
- body_filtered = {
- include_item: body[include_item]
- for include_item in body_include_list
- if include_item in body
- }
-
- process_group_id = un_modify_modified_process_model_id(modified_process_group_id)
- process_group = ProcessGroup(id=process_group_id, **body_filtered)
- ProcessModelService.update_process_group(process_group)
- commit_and_push_to_git(
- f"User: {g.user.username} updated process group {process_group_id}"
- )
- return make_response(jsonify(process_group), 200)
-
-
-def process_group_list(
- process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
- """Process_group_list."""
- if process_group_identifier is not None:
- process_groups = ProcessModelService.get_process_groups(
- process_group_identifier
- )
- else:
- process_groups = ProcessModelService.get_process_groups()
- batch = ProcessModelService().get_batch(
- items=process_groups, page=page, per_page=per_page
- )
- pages = len(process_groups) // per_page
- remainder = len(process_groups) % per_page
- if remainder > 0:
- pages += 1
-
- response_json = {
- "results": ProcessGroupSchema(many=True).dump(batch),
- "pagination": {
- "count": len(batch),
- "total": len(process_groups),
- "pages": pages,
- },
- }
- return Response(json.dumps(response_json), status=200, mimetype="application/json")
-
-
-def process_group_show(
- modified_process_group_id: str,
-) -> Any:
- """Process_group_show."""
- process_group_id = un_modify_modified_process_model_id(modified_process_group_id)
- try:
- process_group = ProcessModelService.get_process_group(process_group_id)
- except ProcessEntityNotFoundError as exception:
- raise (
- ApiError(
- error_code="process_group_cannot_be_found",
- message=f"Process group cannot be found: {process_group_id}",
- status_code=400,
- )
- ) from exception
-
- process_group.parent_groups = ProcessModelService.get_parent_group_array(
- process_group.id
- )
- return make_response(jsonify(process_group), 200)
-
-
-def process_group_move(
- modified_process_group_identifier: str, new_location: str
-) -> flask.wrappers.Response:
- """Process_group_move."""
- original_process_group_id = un_modify_modified_process_model_id(
- modified_process_group_identifier
- )
- new_process_group = ProcessModelService().process_group_move(
- original_process_group_id, new_location
- )
- commit_and_push_to_git(
- f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
- )
- return make_response(jsonify(new_process_group), 200)
-
-
-def process_model_create(
- modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
-) -> flask.wrappers.Response:
- """Process_model_create."""
- body_include_list = [
- "id",
- "display_name",
- "primary_file_name",
- "primary_process_id",
- "description",
- "metadata_extraction_paths",
- ]
- body_filtered = {
- include_item: body[include_item]
- for include_item in body_include_list
- if include_item in body
- }
-
- if modified_process_group_id is None:
- raise ApiError(
- error_code="process_group_id_not_specified",
- message="Process Model could not be created when process_group_id path param is unspecified",
- status_code=400,
- )
-
- unmodified_process_group_id = un_modify_modified_process_model_id(
- modified_process_group_id
- )
- process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
- if process_group is None:
- raise ApiError(
- error_code="process_model_could_not_be_created",
- message=f"Process Model could not be created from given body because Process Group could not be found: {body}",
- status_code=400,
- )
-
- process_model_info = ProcessModelInfo(**body_filtered) # type: ignore
- if process_model_info is None:
- raise ApiError(
- error_code="process_model_could_not_be_created",
- message=f"Process Model could not be created from given body: {body}",
- status_code=400,
- )
-
- ProcessModelService.add_process_model(process_model_info)
- commit_and_push_to_git(
- f"User: {g.user.username} created process model {process_model_info.id}"
- )
- return Response(
- json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
- status=201,
- mimetype="application/json",
- )
-
-
-def process_model_delete(
- modified_process_model_identifier: str,
-) -> flask.wrappers.Response:
- """Process_model_delete."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- ProcessModelService().process_model_delete(process_model_identifier)
- commit_and_push_to_git(
- f"User: {g.user.username} deleted process model {process_model_identifier}"
- )
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def process_model_update(
- modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
-) -> Any:
- """Process_model_update."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- body_include_list = [
- "display_name",
- "primary_file_name",
- "primary_process_id",
- "description",
- "metadata_extraction_paths",
- ]
- body_filtered = {
- include_item: body[include_item]
- for include_item in body_include_list
- if include_item in body
- }
-
- process_model = get_process_model(process_model_identifier)
- ProcessModelService.update_process_model(process_model, body_filtered)
- commit_and_push_to_git(
- f"User: {g.user.username} updated process model {process_model_identifier}"
- )
- return ProcessModelInfoSchema().dump(process_model)
-
-
-def process_model_show(modified_process_model_identifier: str) -> Any:
- """Process_model_show."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- process_model = get_process_model(process_model_identifier)
- files = sorted(
- SpecFileService.get_files(process_model),
- key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
- )
- process_model.files = files
- for file in process_model.files:
- file.references = SpecFileService.get_references_for_file(file, process_model)
-
- process_model.parent_groups = ProcessModelService.get_parent_group_array(
- process_model.id
- )
- return make_response(jsonify(process_model), 200)
-
-
-def process_model_move(
- modified_process_model_identifier: str, new_location: str
-) -> flask.wrappers.Response:
- """Process_model_move."""
- original_process_model_id = un_modify_modified_process_model_id(
- modified_process_model_identifier
- )
- new_process_model = ProcessModelService().process_model_move(
- original_process_model_id, new_location
- )
- commit_and_push_to_git(
- f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
- )
- return make_response(jsonify(new_process_model), 200)
-
-
-def process_model_publish(
- modified_process_model_identifier: str, branch_to_update: Optional[str] = None
-) -> flask.wrappers.Response:
- """Process_model_publish."""
- if branch_to_update is None:
- branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
- process_model_identifier = un_modify_modified_process_model_id(
- modified_process_model_identifier
- )
- pr_url = GitService().publish(process_model_identifier, branch_to_update)
- data = {"ok": True, "pr_url": pr_url}
- return Response(json.dumps(data), status=200, mimetype="application/json")
-
-
-def process_model_list(
- process_group_identifier: Optional[str] = None,
- recursive: Optional[bool] = False,
- filter_runnable_by_user: Optional[bool] = False,
- page: int = 1,
- per_page: int = 100,
-) -> flask.wrappers.Response:
- """Process model list!"""
- process_models = ProcessModelService.get_process_models(
- process_group_id=process_group_identifier,
- recursive=recursive,
- filter_runnable_by_user=filter_runnable_by_user,
- )
- batch = ProcessModelService().get_batch(
- process_models, page=page, per_page=per_page
- )
- pages = len(process_models) // per_page
- remainder = len(process_models) % per_page
- if remainder > 0:
- pages += 1
- response_json = {
- "results": ProcessModelInfoSchema(many=True).dump(batch),
- "pagination": {
- "count": len(batch),
- "total": len(process_models),
- "pages": pages,
- },
- }
- return Response(json.dumps(response_json), status=200, mimetype="application/json")
-
-
def process_list() -> Any:
"""Returns a list of all known processes.
@@ -455,378 +81,96 @@ def process_list() -> Any:
return SpecReferenceSchema(many=True).dump(references)
-def get_file(modified_process_model_identifier: str, file_name: str) -> Any:
- """Get_file."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- process_model = get_process_model(process_model_identifier)
- files = SpecFileService.get_files(process_model, file_name)
- if len(files) == 0:
- raise ApiError(
- error_code="unknown file",
- message=f"No information exists for file {file_name}"
- f" it does not exist in workflow {process_model_identifier}.",
- status_code=404,
- )
-
- file = files[0]
- file_contents = SpecFileService.get_data(process_model, file.name)
- file.file_contents = file_contents
- file.process_model_id = process_model.id
- # file.process_group_id = process_model.process_group_id
- return FileSchema().dump(file)
-
-
-def process_model_file_update(
- modified_process_model_identifier: str, file_name: str
+def process_data_show(
+ process_instance_id: int,
+ process_data_identifier: str,
+ modified_process_model_identifier: str,
) -> flask.wrappers.Response:
- """Process_model_file_update."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- process_model = get_process_model(process_model_identifier)
+ """Process_data_show."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ processor = ProcessInstanceProcessor(process_instance)
+ all_process_data = processor.get_data()
+ process_data_value = None
+ if process_data_identifier in all_process_data:
+ process_data_value = all_process_data[process_data_identifier]
- request_file = get_file_from_request()
- request_file_contents = request_file.stream.read()
- if not request_file_contents:
- raise ApiError(
- error_code="file_contents_empty",
- message="Given request file does not have any content",
- status_code=400,
- )
-
- SpecFileService.update_file(process_model, file_name, request_file_contents)
- commit_and_push_to_git(
- f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}"
+ return make_response(
+ jsonify(
+ {
+ "process_data_identifier": process_data_identifier,
+ "process_data_value": process_data_value,
+ }
+ ),
+ 200,
)
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-def process_model_file_delete(
- modified_process_model_identifier: str, file_name: str
-) -> flask.wrappers.Response:
- """Process_model_file_delete."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- process_model = get_process_model(process_model_identifier)
- try:
- SpecFileService.delete_file(process_model, file_name)
- except FileNotFoundError as exception:
- raise (
- ApiError(
- error_code="process_model_file_cannot_be_found",
- message=f"Process model file cannot be found: {file_name}",
- status_code=400,
- )
- ) from exception
-
- commit_and_push_to_git(
- f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}"
- )
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response:
- """Add_file."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- process_model = get_process_model(process_model_identifier)
- request_file = get_file_from_request()
- if not request_file.filename:
- raise ApiError(
- error_code="could_not_get_filename",
- message="Could not get filename from request",
- status_code=400,
- )
-
- file = SpecFileService.add_file(
- process_model, request_file.filename, request_file.stream.read()
- )
- file_contents = SpecFileService.get_data(process_model, file.name)
- file.file_contents = file_contents
- file.process_model_id = process_model.id
- commit_and_push_to_git(
- f"User: {g.user.username} added process model file {process_model_identifier}/{file.name}"
- )
+# sample body:
+# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
+# "full_name": "sartography/sample-process-models", "private": False .... }}
+# test with: ngrok http 7000
+# where 7000 is the port the app is running on locally
+def github_webhook_receive(body: Dict) -> Response:
+ """Github_webhook_receive."""
+ auth_header = request.headers.get("X-Hub-Signature-256")
+ AuthorizationService.verify_sha256_token(auth_header)
+ result = GitService.handle_web_hook(body)
return Response(
- json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
+ json.dumps({"git_pull": result}), status=200, mimetype="application/json"
)
-def process_instance_create(
+def task_data_update(
+ process_instance_id: str,
modified_process_model_identifier: str,
-) -> flask.wrappers.Response:
- """Create_process_instance."""
- process_model_identifier = un_modify_modified_process_model_id(
- modified_process_model_identifier
- )
- process_instance = (
- ProcessInstanceService.create_process_instance_from_process_model_identifier(
- process_model_identifier, g.user
- )
- )
- return Response(
- json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
- status=201,
- mimetype="application/json",
- )
-
-
-def process_instance_run(
- modified_process_model_identifier: str,
- process_instance_id: int,
- do_engine_steps: bool = True,
-) -> flask.wrappers.Response:
- """Process_instance_run."""
- process_instance = ProcessInstanceService().get_process_instance(
- process_instance_id
- )
- processor = ProcessInstanceProcessor(process_instance)
-
- if do_engine_steps:
- try:
- processor.do_engine_steps()
- except ApiError as e:
- ErrorHandlingService().handle_error(processor, e)
- raise e
- except Exception as e:
- ErrorHandlingService().handle_error(processor, e)
- task = processor.bpmn_process_instance.last_task
- raise ApiError.from_task(
- error_code="unknown_exception",
- message=f"An unknown error occurred. Original error: {e}",
- status_code=400,
- task=task,
- ) from e
- processor.save()
-
- if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
- MessageService.process_message_instances()
-
- process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
- processor
- )
- process_instance_data = processor.get_data()
- process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
- process_instance_metadata["data"] = process_instance_data
- return Response(
- json.dumps(process_instance_metadata), status=200, mimetype="application/json"
- )
-
-
-def process_instance_terminate(
- process_instance_id: int,
- modified_process_model_identifier: str,
-) -> flask.wrappers.Response:
- """Process_instance_run."""
- process_instance = ProcessInstanceService().get_process_instance(
- process_instance_id
- )
- processor = ProcessInstanceProcessor(process_instance)
- processor.terminate()
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def process_instance_suspend(
- process_instance_id: int,
- modified_process_model_identifier: str,
-) -> flask.wrappers.Response:
- """Process_instance_suspend."""
- process_instance = ProcessInstanceService().get_process_instance(
- process_instance_id
- )
- processor = ProcessInstanceProcessor(process_instance)
- processor.suspend()
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def process_instance_resume(
- process_instance_id: int,
- modified_process_model_identifier: str,
-) -> flask.wrappers.Response:
- """Process_instance_resume."""
- process_instance = ProcessInstanceService().get_process_instance(
- process_instance_id
- )
- processor = ProcessInstanceProcessor(process_instance)
- processor.resume()
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def process_instance_log_list(
- modified_process_model_identifier: str,
- process_instance_id: int,
- page: int = 1,
- per_page: int = 100,
- detailed: bool = False,
-) -> flask.wrappers.Response:
- """Process_instance_log_list."""
- # to make sure the process instance exists
- process_instance = find_process_instance_by_id_or_raise(process_instance_id)
-
- log_query = SpiffLoggingModel.query.filter(
- SpiffLoggingModel.process_instance_id == process_instance.id
- )
- if not detailed:
- log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore
-
- logs = (
- log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
- .join(
- UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
- ) # isouter since if we don't have a user, we still want the log
- .add_columns(
- UserModel.username,
- )
- .paginate(page=page, per_page=per_page, error_out=False)
- )
-
- response_json = {
- "results": logs.items,
- "pagination": {
- "count": len(logs.items),
- "total": logs.total,
- "pages": logs.pages,
- },
- }
-
- return make_response(jsonify(response_json), 200)
-
-
-def message_instance_list(
- process_instance_id: Optional[int] = None,
- page: int = 1,
- per_page: int = 100,
-) -> flask.wrappers.Response:
- """Message_instance_list."""
- # to make sure the process instance exists
- message_instances_query = MessageInstanceModel.query
-
- if process_instance_id:
- message_instances_query = message_instances_query.filter_by(
- process_instance_id=process_instance_id
- )
-
- message_instances = (
- message_instances_query.order_by(
- MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore
- MessageInstanceModel.id.desc(), # type: ignore
- )
- .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id)
- .join(ProcessInstanceModel)
- .add_columns(
- MessageModel.identifier.label("message_identifier"),
- ProcessInstanceModel.process_model_identifier,
- ProcessInstanceModel.process_model_display_name,
- )
- .paginate(page=page, per_page=per_page, error_out=False)
- )
-
- for message_instance in message_instances:
- message_correlations: dict = {}
- for (
- mcmi
- ) in (
- message_instance.MessageInstanceModel.message_correlations_message_instances
- ):
- mc = MessageCorrelationModel.query.filter_by(
- id=mcmi.message_correlation_id
- ).all()
- for m in mc:
- if m.name not in message_correlations:
- message_correlations[m.name] = {}
- message_correlations[m.name][
- m.message_correlation_property.identifier
- ] = m.value
- message_instance.MessageInstanceModel.message_correlations = (
- message_correlations
- )
-
- response_json = {
- "results": message_instances.items,
- "pagination": {
- "count": len(message_instances.items),
- "total": message_instances.total,
- "pages": message_instances.pages,
- },
- }
-
- return make_response(jsonify(response_json), 200)
-
-
-# body: {
-# payload: dict,
-# process_instance_id: Optional[int],
-# }
-def message_start(
- message_identifier: str,
- body: Dict[str, Any],
-) -> flask.wrappers.Response:
- """Message_start."""
- message_model = MessageModel.query.filter_by(identifier=message_identifier).first()
- if message_model is None:
- raise (
- ApiError(
- error_code="unknown_message",
- message=f"Could not find message with identifier: {message_identifier}",
- status_code=404,
+ task_id: str,
+ body: Dict,
+) -> Response:
+ """Update task data."""
+ process_instance = ProcessInstanceModel.query.filter(
+ ProcessInstanceModel.id == int(process_instance_id)
+ ).first()
+ if process_instance:
+ if process_instance.status != "suspended":
+ raise ProcessInstanceTaskDataCannotBeUpdatedError(
+                "The process instance needs to be suspended to update the task-data."
+ f" It is currently: {process_instance.status}"
)
- )
- if "payload" not in body:
- raise (
- ApiError(
- error_code="missing_payload",
- message="Body is missing payload.",
- status_code=400,
- )
- )
-
- process_instance = None
- if "process_instance_id" in body:
- # to make sure we have a valid process_instance_id
- process_instance = find_process_instance_by_id_or_raise(
- body["process_instance_id"]
- )
-
- message_instance = MessageInstanceModel.query.filter_by(
- process_instance_id=process_instance.id,
- message_model_id=message_model.id,
- message_type="receive",
- status="ready",
- ).first()
- if message_instance is None:
- raise (
- ApiError(
- error_code="cannot_find_waiting_message",
- message=f"Could not find waiting message for identifier {message_identifier} "
- f"and process instance {process_instance.id}",
- status_code=400,
+ process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json)
+ if "new_task_data" in body:
+ new_task_data_str: str = body["new_task_data"]
+ new_task_data_dict = json.loads(new_task_data_str)
+ if task_id in process_instance_bpmn_json_dict["tasks"]:
+ process_instance_bpmn_json_dict["tasks"][task_id][
+ "data"
+ ] = new_task_data_dict
+ process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict)
+ db.session.add(process_instance)
+ try:
+ db.session.commit()
+ except Exception as e:
+ db.session.rollback()
+ raise ApiError(
+ error_code="update_task_data_error",
+ message=f"Could not update the Instance. Original error is {e}",
+ ) from e
+ else:
+ raise ApiError(
+ error_code="update_task_data_error",
+ message=(
+ f"Could not find Task: {task_id} in Instance:"
+ f" {process_instance_id}."
+ ),
)
- )
- MessageService.process_message_receive(
- message_instance, message_model.name, body["payload"]
- )
-
else:
- message_triggerable_process_model = (
- MessageTriggerableProcessModel.query.filter_by(
- message_model_id=message_model.id
- ).first()
+ raise ApiError(
+ error_code="update_task_data_error",
+ message=(
+ f"Could not update task data for Instance: {process_instance_id}, and"
+ f" Task: {task_id}."
+ ),
)
-
- if message_triggerable_process_model is None:
- raise (
- ApiError(
- error_code="cannot_start_message",
- message=f"Message with identifier cannot be start with message: {message_identifier}",
- status_code=400,
- )
- )
-
- process_instance = MessageService.process_message_triggerable_process_model(
- message_triggerable_process_model,
- message_model.name,
- body["payload"],
- g.user,
- )
-
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
@@ -834,1018 +178,91 @@ def message_start(
)
-def process_instance_list(
- process_model_identifier: Optional[str] = None,
- page: int = 1,
- per_page: int = 100,
- start_from: Optional[int] = None,
- start_to: Optional[int] = None,
- end_from: Optional[int] = None,
- end_to: Optional[int] = None,
- process_status: Optional[str] = None,
- initiated_by_me: Optional[bool] = None,
- with_tasks_completed_by_me: Optional[bool] = None,
- with_tasks_completed_by_my_group: Optional[bool] = None,
- user_filter: Optional[bool] = False,
- report_identifier: Optional[str] = None,
- report_id: Optional[int] = None,
-) -> flask.wrappers.Response:
- """Process_instance_list."""
- process_instance_report = ProcessInstanceReportService.report_with_identifier(
- g.user, report_id, report_identifier
- )
+def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any:
+ """Get_required_parameter_or_raise."""
+ return_value = None
+ if parameter in post_body:
+ return_value = post_body[parameter]
- if user_filter:
- report_filter = ProcessInstanceReportFilter(
- process_model_identifier,
- start_from,
- start_to,
- end_from,
- end_to,
- process_status.split(",") if process_status else None,
- initiated_by_me,
- with_tasks_completed_by_me,
- with_tasks_completed_by_my_group,
- )
- else:
- report_filter = (
- ProcessInstanceReportService.filter_from_metadata_with_overrides(
- process_instance_report,
- process_model_identifier,
- start_from,
- start_to,
- end_from,
- end_to,
- process_status,
- initiated_by_me,
- with_tasks_completed_by_me,
- with_tasks_completed_by_my_group,
- )
- )
-
- process_instance_query = ProcessInstanceModel.query
- # Always join that hot user table for good performance at serialization time.
- process_instance_query = process_instance_query.options(
- selectinload(ProcessInstanceModel.process_initiator)
- )
-
- if report_filter.process_model_identifier is not None:
- process_model = get_process_model(
- f"{report_filter.process_model_identifier}",
- )
-
- process_instance_query = process_instance_query.filter_by(
- process_model_identifier=process_model.id
- )
-
- # this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
- if (
- ProcessInstanceModel.start_in_seconds is None
- or ProcessInstanceModel.end_in_seconds is None
- ):
+ if return_value is None or return_value == "":
raise (
ApiError(
- error_code="unexpected_condition",
- message="Something went very wrong",
- status_code=500,
+ error_code="missing_required_parameter",
+ message=f"Parameter is missing from json request body: {parameter}",
+ status_code=400,
)
)
- if report_filter.start_from is not None:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.start_in_seconds >= report_filter.start_from
- )
- if report_filter.start_to is not None:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.start_in_seconds <= report_filter.start_to
- )
- if report_filter.end_from is not None:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.end_in_seconds >= report_filter.end_from
- )
- if report_filter.end_to is not None:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.end_in_seconds <= report_filter.end_to
- )
- if report_filter.process_status is not None:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore
- )
-
- if report_filter.initiated_by_me is True:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore
- )
- process_instance_query = process_instance_query.filter_by(
- process_initiator=g.user
- )
-
- # TODO: not sure if this is exactly what is wanted
- if report_filter.with_tasks_completed_by_me is True:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore
- )
- # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
- # process_instance_query = process_instance_query.add_columns(UserModel.username)
- # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying.
-
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.process_initiator_id != g.user.id
- )
- process_instance_query = process_instance_query.join(
- SpiffStepDetailsModel,
- ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id,
- )
- process_instance_query = process_instance_query.join(
- SpiffLoggingModel,
- ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id,
- )
- process_instance_query = process_instance_query.filter(
- SpiffLoggingModel.message.contains("COMPLETED") # type: ignore
- )
- process_instance_query = process_instance_query.filter(
- SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step
- )
- process_instance_query = process_instance_query.filter(
- SpiffStepDetailsModel.completed_by_user_id == g.user.id
- )
-
- if report_filter.with_tasks_completed_by_my_group is True:
- process_instance_query = process_instance_query.filter(
- ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore
- )
- process_instance_query = process_instance_query.join(
- SpiffStepDetailsModel,
- ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id,
- )
- process_instance_query = process_instance_query.join(
- SpiffLoggingModel,
- ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id,
- )
- process_instance_query = process_instance_query.filter(
- SpiffLoggingModel.message.contains("COMPLETED") # type: ignore
- )
- process_instance_query = process_instance_query.filter(
- SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step
- )
- process_instance_query = process_instance_query.join(
- GroupModel,
- GroupModel.id == SpiffStepDetailsModel.lane_assignment_id,
- )
- process_instance_query = process_instance_query.join(
- UserGroupAssignmentModel,
- UserGroupAssignmentModel.group_id == GroupModel.id,
- )
- process_instance_query = process_instance_query.filter(
- UserGroupAssignmentModel.user_id == g.user.id
- )
-
- instance_metadata_aliases = {}
- stock_columns = ProcessInstanceReportService.get_column_names_for_model(
- ProcessInstanceModel
- )
- for column in process_instance_report.report_metadata["columns"]:
- if column["accessor"] in stock_columns:
- continue
- instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
- instance_metadata_aliases[column["accessor"]] = instance_metadata_alias
-
- filter_for_column = None
- if "filter_by" in process_instance_report.report_metadata:
- filter_for_column = next(
- (
- f
- for f in process_instance_report.report_metadata["filter_by"]
- if f["field_name"] == column["accessor"]
- ),
- None,
- )
- isouter = True
- conditions = [
- ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
- instance_metadata_alias.key == column["accessor"],
- ]
- if filter_for_column:
- isouter = False
- conditions.append(
- instance_metadata_alias.value == filter_for_column["field_value"]
- )
- process_instance_query = process_instance_query.join(
- instance_metadata_alias, and_(*conditions), isouter=isouter
- ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"]))
-
- order_by_query_array = []
- order_by_array = process_instance_report.report_metadata["order_by"]
- if len(order_by_array) < 1:
- order_by_array = ProcessInstanceReportModel.default_order_by()
- for order_by_option in order_by_array:
- attribute = re.sub("^-", "", order_by_option)
- if attribute in stock_columns:
- if order_by_option.startswith("-"):
- order_by_query_array.append(
- getattr(ProcessInstanceModel, attribute).desc()
- )
- else:
- order_by_query_array.append(
- getattr(ProcessInstanceModel, attribute).asc()
- )
- elif attribute in instance_metadata_aliases:
- if order_by_option.startswith("-"):
- order_by_query_array.append(
- func.max(instance_metadata_aliases[attribute].value).desc()
- )
- else:
- order_by_query_array.append(
- func.max(instance_metadata_aliases[attribute].value).asc()
- )
-
- process_instances = (
- process_instance_query.group_by(ProcessInstanceModel.id)
- .add_columns(ProcessInstanceModel.id)
- .order_by(*order_by_query_array)
- .paginate(page=page, per_page=per_page, error_out=False)
- )
-
- results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(
- process_instances.items, process_instance_report.report_metadata["columns"]
- )
-
- response_json = {
- "report": process_instance_report,
- "results": results,
- "filters": report_filter.to_dict(),
- "pagination": {
- "count": len(results),
- "total": process_instances.total,
- "pages": process_instances.pages,
- },
- }
-
- return make_response(jsonify(response_json), 200)
+ return return_value
-def process_instance_report_column_list() -> flask.wrappers.Response:
- """Process_instance_report_column_list."""
- table_columns = ProcessInstanceReportService.builtin_column_options()
- columns_for_metadata = (
- db.session.query(ProcessInstanceMetadataModel.key)
- .order_by(ProcessInstanceMetadataModel.key)
- .distinct() # type: ignore
- .all()
- )
- columns_for_metadata_strings = [
- {"Header": i[0], "accessor": i[0], "filterable": True}
- for i in columns_for_metadata
- ]
- return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)
-
-
-def process_instance_show(
+def send_bpmn_event(
modified_process_model_identifier: str,
- process_instance_id: int,
- process_identifier: Optional[str] = None,
-) -> flask.wrappers.Response:
- """Create_process_instance."""
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- process_instance = find_process_instance_by_id_or_raise(process_instance_id)
- current_version_control_revision = GitService.get_current_revision()
-
- process_model_with_diagram = None
- name_of_file_with_diagram = None
- if process_identifier:
- spec_reference = SpecReferenceCache.query.filter_by(
- identifier=process_identifier
- ).first()
- if spec_reference is None:
- raise SpecReferenceNotFoundError(
- f"Could not find given process identifier in the cache: {process_identifier}"
- )
-
- process_model_with_diagram = ProcessModelService.get_process_model(
- spec_reference.process_model_id
- )
- name_of_file_with_diagram = spec_reference.file_name
+ process_instance_id: str,
+ body: Dict,
+) -> Response:
+ """Send a bpmn event to a workflow."""
+ process_instance = ProcessInstanceModel.query.filter(
+ ProcessInstanceModel.id == int(process_instance_id)
+ ).first()
+ if process_instance:
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.send_bpmn_event(body)
else:
- process_model_with_diagram = get_process_model(process_model_identifier)
- if process_model_with_diagram.primary_file_name:
- name_of_file_with_diagram = process_model_with_diagram.primary_file_name
-
- if process_model_with_diagram and name_of_file_with_diagram:
- if (
- process_instance.bpmn_version_control_identifier
- == current_version_control_revision
- ):
- bpmn_xml_file_contents = SpecFileService.get_data(
- process_model_with_diagram, name_of_file_with_diagram
- ).decode("utf-8")
- else:
- bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision(
- process_model_with_diagram,
- process_instance.bpmn_version_control_identifier,
- file_name=name_of_file_with_diagram,
- )
- process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents
-
- return make_response(jsonify(process_instance), 200)
-
-
-def process_instance_delete(
- process_instance_id: int, modified_process_model_identifier: str
-) -> flask.wrappers.Response:
- """Create_process_instance."""
- process_instance = find_process_instance_by_id_or_raise(process_instance_id)
-
- # (Pdb) db.session.delete
- # >
- db.session.query(SpiffLoggingModel).filter_by(
- process_instance_id=process_instance.id
- ).delete()
- db.session.query(SpiffStepDetailsModel).filter_by(
- process_instance_id=process_instance.id
- ).delete()
- db.session.delete(process_instance)
- db.session.commit()
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def process_instance_report_list(
- page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
- """Process_instance_report_list."""
- process_instance_reports = ProcessInstanceReportModel.query.filter_by(
- created_by_id=g.user.id,
- ).all()
-
- return make_response(jsonify(process_instance_reports), 200)
-
-
-def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
- """Process_instance_report_create."""
- process_instance_report = ProcessInstanceReportModel.create_report(
- identifier=body["identifier"],
- user=g.user,
- report_metadata=body["report_metadata"],
- )
-
- return make_response(jsonify(process_instance_report), 201)
-
-
-def process_instance_report_update(
- report_id: int,
- body: Dict[str, Any],
-) -> flask.wrappers.Response:
- """Process_instance_report_create."""
- process_instance_report = ProcessInstanceReportModel.query.filter_by(
- id=report_id,
- created_by_id=g.user.id,
- ).first()
- if process_instance_report is None:
raise ApiError(
- error_code="unknown_process_instance_report",
- message="Unknown process instance report",
- status_code=404,
+ error_code="send_bpmn_event_error",
+ message=f"Could not send event to Instance: {process_instance_id}",
)
-
- process_instance_report.report_metadata = body["report_metadata"]
- db.session.commit()
-
- return make_response(jsonify(process_instance_report), 201)
-
-
-def process_instance_report_delete(
- report_id: int,
-) -> flask.wrappers.Response:
- """Process_instance_report_create."""
- process_instance_report = ProcessInstanceReportModel.query.filter_by(
- id=report_id,
- created_by_id=g.user.id,
- ).first()
- if process_instance_report is None:
- raise ApiError(
- error_code="unknown_process_instance_report",
- message="Unknown process instance report",
- status_code=404,
- )
-
- db.session.delete(process_instance_report)
- db.session.commit()
-
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def service_task_list() -> flask.wrappers.Response:
- """Service_task_list."""
- available_connectors = ServiceTaskService.available_connectors()
return Response(
- json.dumps(available_connectors), status=200, mimetype="application/json"
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=200,
+ mimetype="application/json",
)
-def authentication_list() -> flask.wrappers.Response:
- """Authentication_list."""
- available_authentications = ServiceTaskService.authentication_list()
- response_json = {
- "results": available_authentications,
- "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"],
- "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
- }
-
- return Response(json.dumps(response_json), status=200, mimetype="application/json")
-
-
-def authentication_callback(
- service: str,
- auth_method: str,
-) -> werkzeug.wrappers.Response:
- """Authentication_callback."""
- verify_token(request.args.get("token"), force_run=True)
- response = request.args["response"]
- SecretService().update_secret(
- f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
- )
- return redirect(
- f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration"
- )
-
-
-def process_instance_report_show(
- report_id: int,
- page: int = 1,
- per_page: int = 100,
-) -> flask.wrappers.Response:
- """Process_instance_report_show."""
- process_instances = ProcessInstanceModel.query.order_by(
- ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
- ).paginate(page=page, per_page=per_page, error_out=False)
-
- process_instance_report = ProcessInstanceReportModel.query.filter_by(
- id=report_id,
- created_by_id=g.user.id,
- ).first()
- if process_instance_report is None:
- raise ApiError(
- error_code="unknown_process_instance_report",
- message="Unknown process instance report",
- status_code=404,
- )
-
- substitution_variables = request.args.to_dict()
- result_dict = process_instance_report.generate_report(
- process_instances.items, substitution_variables
- )
-
- # update this if we go back to a database query instead of filtering in memory
- result_dict["pagination"] = {
- "count": len(result_dict["results"]),
- "total": len(result_dict["results"]),
- "pages": 1,
- }
-
- return Response(json.dumps(result_dict), status=200, mimetype="application/json")
-
-
-# TODO: see comment for before_request
-# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
-def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
- """Task_list_my_tasks."""
- principal = find_principal_or_raise()
- active_tasks = (
- ActiveTaskModel.query.order_by(desc(ActiveTaskModel.id)) # type: ignore
- .join(ProcessInstanceModel)
- .join(ActiveTaskUserModel)
- .filter_by(user_id=principal.user_id)
- # just need this add_columns to add the process_model_identifier. Then add everything back that was removed.
- .add_columns(
- ProcessInstanceModel.process_model_identifier,
- ProcessInstanceModel.process_model_display_name,
- ProcessInstanceModel.status,
- ActiveTaskModel.task_name,
- ActiveTaskModel.task_title,
- ActiveTaskModel.task_type,
- ActiveTaskModel.task_status,
- ActiveTaskModel.task_id,
- ActiveTaskModel.id,
- ActiveTaskModel.process_model_display_name,
- ActiveTaskModel.process_instance_id,
- )
- .paginate(page=page, per_page=per_page, error_out=False)
- )
- tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items]
-
- response_json = {
- "results": tasks,
- "pagination": {
- "count": len(active_tasks.items),
- "total": active_tasks.total,
- "pages": active_tasks.pages,
- },
- }
-
- return make_response(jsonify(response_json), 200)
-
-
-def task_list_for_my_open_processes(
- page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
- """Task_list_for_my_open_processes."""
- return get_tasks(page=page, per_page=per_page)
-
-
-def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
- """Task_list_for_processes_started_by_others."""
- return get_tasks(
- processes_started_by_user=False,
- has_lane_assignment_id=False,
- page=page,
- per_page=per_page,
- )
-
-
-def task_list_for_my_groups(
- page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
- """Task_list_for_processes_started_by_others."""
- return get_tasks(processes_started_by_user=False, page=page, per_page=per_page)
-
-
-def get_tasks(
- processes_started_by_user: bool = True,
- has_lane_assignment_id: bool = True,
- page: int = 1,
- per_page: int = 100,
-) -> flask.wrappers.Response:
- """Get_tasks."""
- user_id = g.user.id
-
- # use distinct to ensure we only get one row per active task otherwise
- # we can get back multiple for the same active task row which throws off
- # pagination later on
- # https://stackoverflow.com/q/34582014/6090676
- active_tasks_query = (
- ActiveTaskModel.query.distinct()
- .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id)
- .join(ProcessInstanceModel)
- .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
- )
-
- if processes_started_by_user:
- active_tasks_query = active_tasks_query.filter(
- ProcessInstanceModel.process_initiator_id == user_id
- ).outerjoin(
- ActiveTaskUserModel,
- and_(
- ActiveTaskUserModel.user_id == user_id,
- ActiveTaskModel.id == ActiveTaskUserModel.active_task_id,
- ),
- )
- else:
- active_tasks_query = active_tasks_query.filter(
- ProcessInstanceModel.process_initiator_id != user_id
- ).join(
- ActiveTaskUserModel,
- and_(
- ActiveTaskUserModel.user_id == user_id,
- ActiveTaskModel.id == ActiveTaskUserModel.active_task_id,
- ),
- )
- if has_lane_assignment_id:
- active_tasks_query = active_tasks_query.filter(
- ActiveTaskModel.lane_assignment_id.is_not(None) # type: ignore
- )
- else:
- active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None)) # type: ignore
-
- active_tasks = active_tasks_query.add_columns(
- ProcessInstanceModel.process_model_identifier,
- ProcessInstanceModel.status.label("process_instance_status"), # type: ignore
- ProcessInstanceModel.updated_at_in_seconds,
- ProcessInstanceModel.created_at_in_seconds,
- UserModel.username,
- GroupModel.identifier.label("group_identifier"),
- ActiveTaskModel.task_name,
- ActiveTaskModel.task_title,
- ActiveTaskModel.process_model_display_name,
- ActiveTaskModel.process_instance_id,
- ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"),
- ).paginate(page=page, per_page=per_page, error_out=False)
-
- response_json = {
- "results": active_tasks.items,
- "pagination": {
- "count": len(active_tasks.items),
- "total": active_tasks.total,
- "pages": active_tasks.pages,
- },
- }
- return make_response(jsonify(response_json), 200)
-
-
-def process_instance_task_list_without_task_data(
+def manual_complete_task(
modified_process_model_identifier: str,
- process_instance_id: int,
- all_tasks: bool = False,
- spiff_step: int = 0,
-) -> flask.wrappers.Response:
- """Process_instance_task_list_without_task_data."""
- return process_instance_task_list(
- modified_process_model_identifier,
- process_instance_id,
- all_tasks,
- spiff_step,
- get_task_data=False,
- )
-
-
-def process_instance_task_list_with_task_data(
- modified_process_model_identifier: str,
- process_instance_id: int,
- all_tasks: bool = False,
- spiff_step: int = 0,
-) -> flask.wrappers.Response:
- """Process_instance_task_list_with_task_data."""
- return process_instance_task_list(
- modified_process_model_identifier,
- process_instance_id,
- all_tasks,
- spiff_step,
- get_task_data=True,
- )
-
-
-def process_instance_task_list(
- _modified_process_model_identifier: str,
- process_instance_id: int,
- all_tasks: bool = False,
- spiff_step: int = 0,
- get_task_data: bool = False,
-) -> flask.wrappers.Response:
- """Process_instance_task_list."""
- process_instance = find_process_instance_by_id_or_raise(process_instance_id)
-
- if spiff_step > 0:
- step_detail = (
- db.session.query(SpiffStepDetailsModel)
- .filter(
- SpiffStepDetailsModel.process_instance_id == process_instance.id,
- SpiffStepDetailsModel.spiff_step == spiff_step,
- )
- .first()
- )
- if step_detail is not None and process_instance.bpmn_json is not None:
- bpmn_json = json.loads(process_instance.bpmn_json)
- bpmn_json["tasks"] = step_detail.task_json["tasks"]
- bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
- process_instance.bpmn_json = json.dumps(bpmn_json)
-
- processor = ProcessInstanceProcessor(process_instance)
-
- spiff_tasks = None
- if all_tasks:
- spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
- else:
- spiff_tasks = processor.get_all_user_tasks()
-
- tasks = []
- for spiff_task in spiff_tasks:
- task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
- if get_task_data:
- task.data = spiff_task.data
- tasks.append(task)
-
- return make_response(jsonify(tasks), 200)
-
-
-def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
- """Task_show."""
- process_instance = find_process_instance_by_id_or_raise(process_instance_id)
-
- if process_instance.status == ProcessInstanceStatus.suspended.value:
- raise ApiError(
- error_code="error_suspended",
- message="The process instance is suspended",
- status_code=400,
- )
-
- process_model = get_process_model(
- process_instance.process_model_identifier,
- )
-
- form_schema_file_name = ""
- form_ui_schema_file_name = ""
- spiff_task = get_spiff_task_from_process_instance(task_id, process_instance)
- extensions = spiff_task.task_spec.extensions
-
- if "properties" in extensions:
- properties = extensions["properties"]
- if "formJsonSchemaFilename" in properties:
- form_schema_file_name = properties["formJsonSchemaFilename"]
- if "formUiSchemaFilename" in properties:
- form_ui_schema_file_name = properties["formUiSchemaFilename"]
- task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
- task.data = spiff_task.data
- task.process_model_display_name = process_model.display_name
- task.process_model_identifier = process_model.id
-
- process_model_with_form = process_model
- refs = SpecFileService.get_references_for_process(process_model_with_form)
- all_processes = [i.identifier for i in refs]
- if task.process_identifier not in all_processes:
- bpmn_file_full_path = (
- ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
- task.process_identifier
- )
- )
- relative_path = os.path.relpath(
- bpmn_file_full_path, start=FileSystemService.root_path()
- )
- process_model_relative_path = os.path.dirname(relative_path)
- process_model_with_form = (
- ProcessModelService.get_process_model_from_relative_path(
- process_model_relative_path
- )
- )
-
- if task.type == "User Task":
- if not form_schema_file_name:
- raise (
- ApiError(
- error_code="missing_form_file",
- message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}",
- status_code=400,
- )
- )
-
- form_contents = prepare_form_data(
- form_schema_file_name,
- task.data,
- process_model_with_form,
- )
-
- try:
- # form_contents is a str
- form_dict = json.loads(form_contents)
- except Exception as exception:
- raise (
- ApiError(
- error_code="error_loading_form",
- message=f"Could not load form schema from: {form_schema_file_name}. Error was: {str(exception)}",
- status_code=400,
- )
- ) from exception
-
- if task.data:
- _update_form_schema_with_task_data_as_needed(form_dict, task.data)
-
- if form_contents:
- task.form_schema = form_dict
-
- if form_ui_schema_file_name:
- ui_form_contents = prepare_form_data(
- form_ui_schema_file_name,
- task.data,
- process_model_with_form,
- )
- if ui_form_contents:
- task.form_ui_schema = ui_form_contents
-
- if task.properties and task.data and "instructionsForEndUser" in task.properties:
- if task.properties["instructionsForEndUser"]:
- task.properties["instructionsForEndUser"] = render_jinja_template(
- task.properties["instructionsForEndUser"], task.data
- )
- return make_response(jsonify(task), 200)
-
-
-def task_submit(
- process_instance_id: int,
+ process_instance_id: str,
task_id: str,
- body: Dict[str, Any],
- terminate_loop: bool = False,
-) -> flask.wrappers.Response:
- """Task_submit_user_data."""
- principal = find_principal_or_raise()
- process_instance = find_process_instance_by_id_or_raise(process_instance_id)
-
- processor = ProcessInstanceProcessor(process_instance)
- spiff_task = get_spiff_task_from_process_instance(
- task_id, process_instance, processor=processor
- )
- AuthorizationService.assert_user_can_complete_spiff_task(
- process_instance.id, spiff_task, principal.user
- )
-
- if spiff_task.state != TaskState.READY:
- raise (
- ApiError(
- error_code="invalid_state",
- message="You may not update a task unless it is in the READY state.",
- status_code=400,
- )
- )
-
- if terminate_loop and spiff_task.is_looping():
- spiff_task.terminate_loop()
-
- active_task = ActiveTaskModel.query.filter_by(
- process_instance_id=process_instance_id, task_id=task_id
+ body: Dict,
+) -> Response:
+ """Mark a task complete without executing it."""
+ execute = body.get("execute", True)
+ process_instance = ProcessInstanceModel.query.filter(
+ ProcessInstanceModel.id == int(process_instance_id)
).first()
- if active_task is None:
- raise (
- ApiError(
- error_code="no_active_task",
- message="Cannot find an active task with task id '{task_id}' for process instance {process_instance_id}.",
- status_code=500,
- )
- )
-
- ProcessInstanceService.complete_form_task(
- processor=processor,
- spiff_task=spiff_task,
- data=body,
- user=g.user,
- active_task=active_task,
- )
-
- # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same
- # task spec, complete that form as well.
- # if update_all:
- # last_index = spiff_task.task_info()["mi_index"]
- # next_task = processor.next_task()
- # while next_task and next_task.task_info()["mi_index"] > last_index:
- # __update_task(processor, next_task, form_data, user)
- # last_index = next_task.task_info()["mi_index"]
- # next_task = processor.next_task()
-
- next_active_task_assigned_to_me = (
- ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id)
- .order_by(asc(ActiveTaskModel.id)) # type: ignore
- .join(ActiveTaskUserModel)
- .filter_by(user_id=principal.user_id)
- .first()
- )
- if next_active_task_assigned_to_me:
- return make_response(
- jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200
- )
-
- return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
-
-
-def script_unit_test_create(
- modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
-) -> flask.wrappers.Response:
- """Script_unit_test_create."""
- bpmn_task_identifier = _get_required_parameter_or_raise(
- "bpmn_task_identifier", body
- )
- input_json = _get_required_parameter_or_raise("input_json", body)
- expected_output_json = _get_required_parameter_or_raise(
- "expected_output_json", body
- )
-
- process_model_identifier = modified_process_model_identifier.replace(":", "/")
- process_model = get_process_model(process_model_identifier)
- file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
- if file is None:
- raise ApiError(
- error_code="cannot_find_file",
- message=f"Could not find the primary bpmn file for process_model: {process_model.id}",
- status_code=404,
- )
-
- # TODO: move this to an xml service or something
- file_contents = SpecFileService.get_data(process_model, file.name)
- bpmn_etree_element = etree.fromstring(file_contents)
-
- nsmap = bpmn_etree_element.nsmap
- spiff_element_maker = ElementMaker(
- namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
- )
-
- script_task_elements = bpmn_etree_element.xpath(
- f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",
- namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
- )
- if len(script_task_elements) == 0:
- raise ApiError(
- error_code="missing_script_task",
- message=f"Cannot find a script task with id: {bpmn_task_identifier}",
- status_code=404,
- )
- script_task_element = script_task_elements[0]
-
- extension_elements = None
- extension_elements_array = script_task_element.xpath(
- "//bpmn:extensionElements",
- namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
- )
- if len(extension_elements_array) == 0:
- bpmn_element_maker = ElementMaker(
- namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
- )
- extension_elements = bpmn_element_maker("extensionElements")
- script_task_element.append(extension_elements)
+ if process_instance:
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.manual_complete_task(task_id, execute)
else:
- extension_elements = extension_elements_array[0]
-
- unit_test_elements = None
- unit_test_elements_array = extension_elements.xpath(
- "//spiffworkflow:unitTests",
- namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"},
- )
- if len(unit_test_elements_array) == 0:
- unit_test_elements = spiff_element_maker("unitTests")
- extension_elements.append(unit_test_elements)
- else:
- unit_test_elements = unit_test_elements_array[0]
-
- fuzz = "".join(
- random.choice(string.ascii_uppercase + string.digits) # noqa: S311
- for _ in range(7)
- )
- unit_test_id = f"unit_test_{fuzz}"
-
- input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
- expected_output_json_element = spiff_element_maker(
- "expectedOutputJson", json.dumps(expected_output_json)
- )
- unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
- unit_test_element.append(input_json_element)
- unit_test_element.append(expected_output_json_element)
- unit_test_elements.append(unit_test_element)
- SpecFileService.update_file(
- process_model, file.name, etree.tostring(bpmn_etree_element)
- )
-
- return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
-
-
-def script_unit_test_run(
- modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
-) -> flask.wrappers.Response:
- """Script_unit_test_run."""
- # FIXME: We should probably clear this somewhere else but this works
- current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
- current_app.config["THREAD_LOCAL_DATA"].spiff_step = None
-
- python_script = _get_required_parameter_or_raise("python_script", body)
- input_json = _get_required_parameter_or_raise("input_json", body)
- expected_output_json = _get_required_parameter_or_raise(
- "expected_output_json", body
- )
-
- result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
- python_script, input_json, expected_output_json
- )
- return make_response(jsonify(result), 200)
-
-
-def get_file_from_request() -> Any:
- """Get_file_from_request."""
- request_file = connexion.request.files.get("file")
- if not request_file:
raise ApiError(
- error_code="no_file_given",
- message="Given request does not contain a file",
- status_code=400,
+ error_code="complete_task",
+ message=(
+ f"Could not complete Task {task_id} in Instance {process_instance_id}"
+ ),
)
- return request_file
+ return Response(
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=200,
+ mimetype="application/json",
+ )
-# process_model_id uses forward slashes on all OSes
-# this seems to return an object where process_model.id has backslashes on windows
-def get_process_model(process_model_id: str) -> ProcessModelInfo:
- """Get_process_model."""
- process_model = None
- try:
- process_model = ProcessModelService.get_process_model(process_model_id)
- except ProcessEntityNotFoundError as exception:
- raise (
- ApiError(
- error_code="process_model_cannot_be_found",
- message=f"Process model cannot be found: {process_model_id}",
- status_code=400,
- )
- ) from exception
-
- return process_model
+def _commit_and_push_to_git(message: str) -> None:
+ """Commit_and_push_to_git."""
+ if current_app.config["GIT_COMMIT_ON_SAVE"]:
+ git_output = GitService.commit(message=message)
+ current_app.logger.info(f"git output: {git_output}")
+ else:
+ current_app.logger.info("Git commit on save is disabled")
-def find_principal_or_raise() -> PrincipalModel:
- """Find_principal_or_raise."""
- principal = PrincipalModel.query.filter_by(user_id=g.user.id).first()
- if principal is None:
- raise (
- ApiError(
- error_code="principal_not_found",
- message=f"Principal not found from user id: {g.user.id}",
- status_code=400,
- )
- )
- return principal # type: ignore
+def _un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str:
+ """Un_modify_modified_process_model_id."""
+ return modified_process_model_identifier.replace(":", "/")
-def find_process_instance_by_id_or_raise(
+def _find_process_instance_by_id_or_raise(
process_instance_id: int,
) -> ProcessInstanceModel:
"""Find_process_instance_by_id_or_raise."""
@@ -1872,257 +289,34 @@ def find_process_instance_by_id_or_raise(
return process_instance # type: ignore
-def get_value_from_array_with_index(array: list, index: int) -> Any:
- """Get_value_from_array_with_index."""
- if index < 0:
- return None
-
- if index >= len(array):
- return None
-
- return array[index]
-
-
-def prepare_form_data(
- form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo
-) -> str:
- """Prepare_form_data."""
- if task_data is None:
- return ""
-
- file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
- return render_jinja_template(file_contents, task_data)
-
-
-def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str:
- """Render_jinja_template."""
- jinja_environment = jinja2.Environment(
- autoescape=True, lstrip_blocks=True, trim_blocks=True
- )
- template = jinja_environment.from_string(unprocessed_template)
- return template.render(**data)
-
-
-def get_spiff_task_from_process_instance(
- task_id: str,
- process_instance: ProcessInstanceModel,
- processor: Union[ProcessInstanceProcessor, None] = None,
-) -> SpiffTask:
- """Get_spiff_task_from_process_instance."""
- if processor is None:
- processor = ProcessInstanceProcessor(process_instance)
- task_uuid = uuid.UUID(task_id)
- spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
-
- if spiff_task is None:
+# process_model_id uses forward slashes on all OSes
+# this seems to return an object where process_model.id has backslashes on windows
+def _get_process_model(process_model_id: str) -> ProcessModelInfo:
+ """Get_process_model."""
+ process_model = None
+ try:
+ process_model = ProcessModelService.get_process_model(process_model_id)
+ except ProcessEntityNotFoundError as exception:
raise (
ApiError(
- error_code="empty_task",
- message="Processor failed to obtain task.",
- status_code=500,
+ error_code="process_model_cannot_be_found",
+ message=f"Process model cannot be found: {process_model_id}",
+ status_code=400,
)
- )
- return spiff_task
+ ) from exception
+
+ return process_model
-# sample body:
-# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
-# "full_name": "sartography/sample-process-models", "private": False .... }}
-# test with: ngrok http 7000
-# where 7000 is the port the app is running on locally
-def github_webhook_receive(body: Dict) -> Response:
- """Github_webhook_receive."""
- auth_header = request.headers.get("X-Hub-Signature-256")
- AuthorizationService.verify_sha256_token(auth_header)
- result = GitService.handle_web_hook(body)
- return Response(
- json.dumps({"git_pull": result}), status=200, mimetype="application/json"
- )
-
-
-#
-# Methods for secrets CRUD - maybe move somewhere else:
-#
-
-
-def get_secret(key: str) -> Optional[str]:
- """Get_secret."""
- return SecretService.get_secret(key)
-
-
-def secret_list(
- page: int = 1,
- per_page: int = 100,
-) -> Response:
- """Secret_list."""
- secrets = (
- SecretModel.query.order_by(SecretModel.key)
- .join(UserModel)
- .add_columns(
- UserModel.username,
- )
- .paginate(page=page, per_page=per_page, error_out=False)
- )
- response_json = {
- "results": secrets.items,
- "pagination": {
- "count": len(secrets.items),
- "total": secrets.total,
- "pages": secrets.pages,
- },
- }
- return make_response(jsonify(response_json), 200)
-
-
-def add_secret(body: Dict) -> Response:
- """Add secret."""
- secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
- return Response(
- json.dumps(SecretModelSchema().dump(secret_model)),
- status=201,
- mimetype="application/json",
- )
-
-
-def update_secret(key: str, body: dict) -> Response:
- """Update secret."""
- SecretService().update_secret(key, body["value"], g.user.id)
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def delete_secret(key: str) -> Response:
- """Delete secret."""
- current_user = UserService.current_user()
- SecretService.delete_secret(key, current_user.id)
- return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-
-
-def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any:
- """Get_required_parameter_or_raise."""
- return_value = None
- if parameter in post_body:
- return_value = post_body[parameter]
-
- if return_value is None or return_value == "":
+def _find_principal_or_raise() -> PrincipalModel:
+ """Find_principal_or_raise."""
+ principal = PrincipalModel.query.filter_by(user_id=g.user.id).first()
+ if principal is None:
raise (
ApiError(
- error_code="missing_required_parameter",
- message=f"Parameter is missing from json request body: {parameter}",
+ error_code="principal_not_found",
+ message=f"Principal not found from user id: {g.user.id}",
status_code=400,
)
)
-
- return return_value
-
-
-# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
-def _update_form_schema_with_task_data_as_needed(
- in_dict: dict, task_data: dict
-) -> None:
- """Update_nested."""
- for k, value in in_dict.items():
- if "anyOf" == k:
- # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"]
- if isinstance(value, list):
- if len(value) == 1:
- first_element_in_value_list = value[0]
- if isinstance(first_element_in_value_list, str):
- if first_element_in_value_list.startswith(
- "options_from_task_data_var:"
- ):
- task_data_var = first_element_in_value_list.replace(
- "options_from_task_data_var:", ""
- )
-
- if task_data_var not in task_data:
- raise (
- ApiError(
- error_code="missing_task_data_var",
- message=f"Task data is missing variable: {task_data_var}",
- status_code=500,
- )
- )
-
- select_options_from_task_data = task_data.get(task_data_var)
- if isinstance(select_options_from_task_data, list):
- if all(
- "value" in d and "label" in d
- for d in select_options_from_task_data
- ):
-
- def map_function(
- task_data_select_option: TaskDataSelectOption,
- ) -> ReactJsonSchemaSelectOption:
- """Map_function."""
- return {
- "type": "string",
- "enum": [task_data_select_option["value"]],
- "title": task_data_select_option["label"],
- }
-
- options_for_react_json_schema_form = list(
- map(map_function, select_options_from_task_data)
- )
-
- in_dict[k] = options_for_react_json_schema_form
- elif isinstance(value, dict):
- _update_form_schema_with_task_data_as_needed(value, task_data)
- elif isinstance(value, list):
- for o in value:
- if isinstance(o, dict):
- _update_form_schema_with_task_data_as_needed(o, task_data)
-
-
-def update_task_data(
- process_instance_id: str,
- modified_process_model_identifier: str,
- task_id: str,
- body: Dict,
-) -> Response:
- """Update task data."""
- process_instance = ProcessInstanceModel.query.filter(
- ProcessInstanceModel.id == int(process_instance_id)
- ).first()
- if process_instance:
- process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json)
- if "new_task_data" in body:
- new_task_data_str: str = body["new_task_data"]
- new_task_data_dict = json.loads(new_task_data_str)
- if task_id in process_instance_bpmn_json_dict["tasks"]:
- process_instance_bpmn_json_dict["tasks"][task_id][
- "data"
- ] = new_task_data_dict
- process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict)
- db.session.add(process_instance)
- try:
- db.session.commit()
- except Exception as e:
- db.session.rollback()
- raise ApiError(
- error_code="update_task_data_error",
- message=f"Could not update the Instance. Original error is {e}",
- ) from e
- else:
- raise ApiError(
- error_code="update_task_data_error",
- message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
- )
- else:
- raise ApiError(
- error_code="update_task_data_error",
- message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.",
- )
- return Response(
- json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
- status=200,
- mimetype="application/json",
- )
-
-
-def commit_and_push_to_git(message: str) -> None:
- """Commit_and_push_to_git."""
- if current_app.config["GIT_COMMIT_ON_SAVE"]:
- git_output = GitService.commit(message=message)
- current_app.logger.info(f"git output: {git_output}")
- else:
- current_app.logger.info("Git commit on save is disabled")
+ return principal # type: ignore
diff --git a/src/spiffworkflow_backend/routes/process_groups_controller.py b/src/spiffworkflow_backend/routes/process_groups_controller.py
new file mode 100644
index 000000000..228be1815
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/process_groups_controller.py
@@ -0,0 +1,130 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+from typing import Any
+from typing import Optional
+
+import flask.wrappers
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+
+from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
+ ProcessEntityNotFoundError,
+)
+from spiffworkflow_backend.models.process_group import ProcessGroup
+from spiffworkflow_backend.models.process_group import ProcessGroupSchema
+from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _un_modify_modified_process_model_id,
+)
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
+
+
+def process_group_create(body: dict) -> flask.wrappers.Response:
+    """Process_group_create."""
+ process_group = ProcessGroup(**body)
+ ProcessModelService.add_process_group(process_group)
+ _commit_and_push_to_git(
+ f"User: {g.user.username} added process group {process_group.id}"
+ )
+ return make_response(jsonify(process_group), 201)
+
+
+def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
+ """Process_group_delete."""
+ process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
+ ProcessModelService().process_group_delete(process_group_id)
+ _commit_and_push_to_git(
+ f"User: {g.user.username} deleted process group {process_group_id}"
+ )
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_group_update(
+ modified_process_group_id: str, body: dict
+) -> flask.wrappers.Response:
+ """Process Group Update."""
+ body_include_list = ["display_name", "description"]
+ body_filtered = {
+ include_item: body[include_item]
+ for include_item in body_include_list
+ if include_item in body
+ }
+
+ process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
+ process_group = ProcessGroup(id=process_group_id, **body_filtered)
+ ProcessModelService.update_process_group(process_group)
+ _commit_and_push_to_git(
+ f"User: {g.user.username} updated process group {process_group_id}"
+ )
+ return make_response(jsonify(process_group), 200)
+
+
+def process_group_list(
+ process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
+) -> flask.wrappers.Response:
+ """Process_group_list."""
+ if process_group_identifier is not None:
+ process_groups = ProcessModelService.get_process_groups(
+ process_group_identifier
+ )
+ else:
+ process_groups = ProcessModelService.get_process_groups()
+ batch = ProcessModelService().get_batch(
+ items=process_groups, page=page, per_page=per_page
+ )
+ pages = len(process_groups) // per_page
+ remainder = len(process_groups) % per_page
+ if remainder > 0:
+ pages += 1
+
+ response_json = {
+ "results": ProcessGroupSchema(many=True).dump(batch),
+ "pagination": {
+ "count": len(batch),
+ "total": len(process_groups),
+ "pages": pages,
+ },
+ }
+ return Response(json.dumps(response_json), status=200, mimetype="application/json")
+
+
+def process_group_show(
+ modified_process_group_id: str,
+) -> Any:
+ """Process_group_show."""
+ process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
+ try:
+ process_group = ProcessModelService.get_process_group(process_group_id)
+ except ProcessEntityNotFoundError as exception:
+ raise (
+ ApiError(
+ error_code="process_group_cannot_be_found",
+ message=f"Process group cannot be found: {process_group_id}",
+ status_code=400,
+ )
+ ) from exception
+
+ process_group.parent_groups = ProcessModelService.get_parent_group_array(
+ process_group.id
+ )
+ return make_response(jsonify(process_group), 200)
+
+
+def process_group_move(
+ modified_process_group_identifier: str, new_location: str
+) -> flask.wrappers.Response:
+ """Process_group_move."""
+ original_process_group_id = _un_modify_modified_process_model_id(
+ modified_process_group_identifier
+ )
+ new_process_group = ProcessModelService().process_group_move(
+ original_process_group_id, new_location
+ )
+ _commit_and_push_to_git(
+ f"User: {g.user.username} moved process group {original_process_group_id} to"
+ f" {new_process_group.id}"
+ )
+ return make_response(jsonify(new_process_group), 200)
diff --git a/src/spiffworkflow_backend/routes/process_instances_controller.py b/src/spiffworkflow_backend/routes/process_instances_controller.py
new file mode 100644
index 000000000..5fbaecdf4
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -0,0 +1,696 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+from typing import Any
+from typing import Dict
+from typing import Optional
+
+import flask.wrappers
+from flask import current_app
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask import request
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+from flask_bpmn.models.db import db
+from SpiffWorkflow.task import TaskState # type: ignore
+from sqlalchemy import and_
+from sqlalchemy import or_
+
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
+from spiffworkflow_backend.models.process_instance import (
+ ProcessInstanceCannotBeDeletedError,
+)
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
+from spiffworkflow_backend.models.process_instance_metadata import (
+ ProcessInstanceMetadataModel,
+)
+from spiffworkflow_backend.models.process_instance_report import (
+ ProcessInstanceReportModel,
+)
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
+from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
+from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _find_process_instance_by_id_or_raise,
+)
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _un_modify_modified_process_model_id,
+)
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
+from spiffworkflow_backend.services.git_service import GitCommandError
+from spiffworkflow_backend.services.git_service import GitService
+from spiffworkflow_backend.services.message_service import MessageService
+from spiffworkflow_backend.services.process_instance_processor import (
+ ProcessInstanceProcessor,
+)
+from spiffworkflow_backend.services.process_instance_report_service import (
+ ProcessInstanceReportFilter,
+)
+from spiffworkflow_backend.services.process_instance_report_service import (
+ ProcessInstanceReportService,
+)
+from spiffworkflow_backend.services.process_instance_service import (
+ ProcessInstanceService,
+)
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.spec_file_service import SpecFileService
+
+
+def process_instance_create(
+ modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+ """Create_process_instance."""
+ process_model_identifier = _un_modify_modified_process_model_id(
+ modified_process_model_identifier
+ )
+ process_instance = (
+ ProcessInstanceService.create_process_instance_from_process_model_identifier(
+ process_model_identifier, g.user
+ )
+ )
+ return Response(
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=201,
+ mimetype="application/json",
+ )
+
+
+def process_instance_run(
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ do_engine_steps: bool = True,
+) -> flask.wrappers.Response:
+ """Process_instance_run."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ if process_instance.status != "not_started":
+ raise ApiError(
+ error_code="process_instance_not_runnable",
+ message=(
+ f"Process Instance ({process_instance.id}) is currently running or has"
+ " already run."
+ ),
+ status_code=400,
+ )
+
+ processor = ProcessInstanceProcessor(process_instance)
+
+ if do_engine_steps:
+ try:
+ processor.do_engine_steps(save=True)
+ except ApiError as e:
+ ErrorHandlingService().handle_error(processor, e)
+ raise e
+ except Exception as e:
+ ErrorHandlingService().handle_error(processor, e)
+ task = processor.bpmn_process_instance.last_task
+ raise ApiError.from_task(
+ error_code="unknown_exception",
+ message=f"An unknown error occurred. Original error: {e}",
+ status_code=400,
+ task=task,
+ ) from e
+
+ if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
+ MessageService.process_message_instances()
+
+ process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
+ processor
+ )
+ process_instance_data = processor.get_data()
+ process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
+ process_instance_metadata["data"] = process_instance_data
+ return Response(
+ json.dumps(process_instance_metadata), status=200, mimetype="application/json"
+ )
+
+
+def process_instance_terminate(
+ process_instance_id: int,
+ modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+    """Process_instance_terminate."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.terminate()
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_instance_suspend(
+ process_instance_id: int,
+ modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+ """Process_instance_suspend."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.suspend()
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_instance_resume(
+ process_instance_id: int,
+ modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+ """Process_instance_resume."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.resume()
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_instance_log_list(
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ page: int = 1,
+ per_page: int = 100,
+ detailed: bool = False,
+) -> flask.wrappers.Response:
+ """Process_instance_log_list."""
+ # to make sure the process instance exists
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+
+ log_query = SpiffLoggingModel.query.filter(
+ SpiffLoggingModel.process_instance_id == process_instance.id
+ )
+ if not detailed:
+ log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore
+
+ logs = (
+ log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
+ .join(
+ UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
+ ) # isouter since if we don't have a user, we still want the log
+ .add_columns(
+ UserModel.username,
+ )
+ .paginate(page=page, per_page=per_page, error_out=False)
+ )
+
+ response_json = {
+ "results": logs.items,
+ "pagination": {
+ "count": len(logs.items),
+ "total": logs.total,
+ "pages": logs.pages,
+ },
+ }
+
+ return make_response(jsonify(response_json), 200)
+
+
+def process_instance_list_for_me(
+ process_model_identifier: Optional[str] = None,
+ page: int = 1,
+ per_page: int = 100,
+ start_from: Optional[int] = None,
+ start_to: Optional[int] = None,
+ end_from: Optional[int] = None,
+ end_to: Optional[int] = None,
+ process_status: Optional[str] = None,
+ user_filter: Optional[bool] = False,
+ report_identifier: Optional[str] = None,
+ report_id: Optional[int] = None,
+ user_group_identifier: Optional[str] = None,
+) -> flask.wrappers.Response:
+ """Process_instance_list_for_me."""
+ return process_instance_list(
+ process_model_identifier=process_model_identifier,
+ page=page,
+ per_page=per_page,
+ start_from=start_from,
+ start_to=start_to,
+ end_from=end_from,
+ end_to=end_to,
+ process_status=process_status,
+ user_filter=user_filter,
+ report_identifier=report_identifier,
+ report_id=report_id,
+ user_group_identifier=user_group_identifier,
+ with_relation_to_me=True,
+ )
+
+
+def process_instance_list(
+ process_model_identifier: Optional[str] = None,
+ page: int = 1,
+ per_page: int = 100,
+ start_from: Optional[int] = None,
+ start_to: Optional[int] = None,
+ end_from: Optional[int] = None,
+ end_to: Optional[int] = None,
+ process_status: Optional[str] = None,
+ with_relation_to_me: Optional[bool] = None,
+ user_filter: Optional[bool] = False,
+ report_identifier: Optional[str] = None,
+ report_id: Optional[int] = None,
+ user_group_identifier: Optional[str] = None,
+) -> flask.wrappers.Response:
+ """Process_instance_list."""
+ process_instance_report = ProcessInstanceReportService.report_with_identifier(
+ g.user, report_id, report_identifier
+ )
+
+ if user_filter:
+ report_filter = ProcessInstanceReportFilter(
+ process_model_identifier=process_model_identifier,
+ user_group_identifier=user_group_identifier,
+ start_from=start_from,
+ start_to=start_to,
+ end_from=end_from,
+ end_to=end_to,
+ with_relation_to_me=with_relation_to_me,
+ process_status=process_status.split(",") if process_status else None,
+ )
+ else:
+ report_filter = (
+ ProcessInstanceReportService.filter_from_metadata_with_overrides(
+ process_instance_report=process_instance_report,
+ process_model_identifier=process_model_identifier,
+ user_group_identifier=user_group_identifier,
+ start_from=start_from,
+ start_to=start_to,
+ end_from=end_from,
+ end_to=end_to,
+ process_status=process_status,
+ with_relation_to_me=with_relation_to_me,
+ )
+ )
+
+ response_json = ProcessInstanceReportService.run_process_instance_report(
+ report_filter=report_filter,
+ process_instance_report=process_instance_report,
+ page=page,
+ per_page=per_page,
+ user=g.user,
+ )
+
+ return make_response(jsonify(response_json), 200)
+
+
+def process_instance_report_column_list() -> flask.wrappers.Response:
+ """Process_instance_report_column_list."""
+ table_columns = ProcessInstanceReportService.builtin_column_options()
+ columns_for_metadata = (
+ db.session.query(ProcessInstanceMetadataModel.key)
+ .order_by(ProcessInstanceMetadataModel.key)
+ .distinct() # type: ignore
+ .all()
+ )
+ columns_for_metadata_strings = [
+ {"Header": i[0], "accessor": i[0], "filterable": True}
+ for i in columns_for_metadata
+ ]
+ return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)
+
+
+def process_instance_show_for_me(
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ process_identifier: Optional[str] = None,
+) -> flask.wrappers.Response:
+ """Process_instance_show_for_me."""
+ process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
+ return _get_process_instance(
+ process_instance=process_instance,
+ modified_process_model_identifier=modified_process_model_identifier,
+ process_identifier=process_identifier,
+ )
+
+
+def process_instance_show(
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ process_identifier: Optional[str] = None,
+) -> flask.wrappers.Response:
+    """Process_instance_show."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ return _get_process_instance(
+ process_instance=process_instance,
+ modified_process_model_identifier=modified_process_model_identifier,
+ process_identifier=process_identifier,
+ )
+
+
+def process_instance_delete(
+ process_instance_id: int, modified_process_model_identifier: str
+) -> flask.wrappers.Response:
+    """Process_instance_delete."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+
+ if not process_instance.has_terminal_status():
+ raise ProcessInstanceCannotBeDeletedError(
+ f"Process instance ({process_instance.id}) cannot be deleted since it does"
+ f" not have a terminal status. Current status is {process_instance.status}."
+ )
+
+    # delete dependent log and step-detail rows first, since no
+    # cascading deletes are configured for these relationships
+ db.session.query(SpiffLoggingModel).filter_by(
+ process_instance_id=process_instance.id
+ ).delete()
+ db.session.query(SpiffStepDetailsModel).filter_by(
+ process_instance_id=process_instance.id
+ ).delete()
+ db.session.delete(process_instance)
+ db.session.commit()
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_instance_report_list(
+ page: int = 1, per_page: int = 100
+) -> flask.wrappers.Response:
+ """Process_instance_report_list."""
+ process_instance_reports = ProcessInstanceReportModel.query.filter_by(
+ created_by_id=g.user.id,
+ ).all()
+
+ return make_response(jsonify(process_instance_reports), 200)
+
+
+def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
+ """Process_instance_report_create."""
+ process_instance_report = ProcessInstanceReportModel.create_report(
+ identifier=body["identifier"],
+ user=g.user,
+ report_metadata=body["report_metadata"],
+ )
+
+ return make_response(jsonify(process_instance_report), 201)
+
+
+def process_instance_report_update(
+ report_id: int,
+ body: Dict[str, Any],
+) -> flask.wrappers.Response:
+ """Process_instance_report_update."""
+ process_instance_report = ProcessInstanceReportModel.query.filter_by(
+ id=report_id,
+ created_by_id=g.user.id,
+ ).first()
+ if process_instance_report is None:
+ raise ApiError(
+ error_code="unknown_process_instance_report",
+ message="Unknown process instance report",
+ status_code=404,
+ )
+
+ process_instance_report.report_metadata = body["report_metadata"]
+ db.session.commit()
+
+ return make_response(jsonify(process_instance_report), 201)
+
+
+def process_instance_report_delete(
+ report_id: int,
+) -> flask.wrappers.Response:
+ """Process_instance_report_delete."""
+ process_instance_report = ProcessInstanceReportModel.query.filter_by(
+ id=report_id,
+ created_by_id=g.user.id,
+ ).first()
+ if process_instance_report is None:
+ raise ApiError(
+ error_code="unknown_process_instance_report",
+ message="Unknown process instance report",
+ status_code=404,
+ )
+
+ db.session.delete(process_instance_report)
+ db.session.commit()
+
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_instance_report_show(
+ report_id: int,
+ page: int = 1,
+ per_page: int = 100,
+) -> flask.wrappers.Response:
+ """Process_instance_report_show."""
+ process_instances = ProcessInstanceModel.query.order_by(
+ ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
+ ).paginate(page=page, per_page=per_page, error_out=False)
+
+ process_instance_report = ProcessInstanceReportModel.query.filter_by(
+ id=report_id,
+ created_by_id=g.user.id,
+ ).first()
+ if process_instance_report is None:
+ raise ApiError(
+ error_code="unknown_process_instance_report",
+ message="Unknown process instance report",
+ status_code=404,
+ )
+
+ substitution_variables = request.args.to_dict()
+ result_dict = process_instance_report.generate_report(
+ process_instances.items, substitution_variables
+ )
+
+ # update this if we go back to a database query instead of filtering in memory
+ result_dict["pagination"] = {
+ "count": len(result_dict["results"]),
+ "total": len(result_dict["results"]),
+ "pages": 1,
+ }
+
+ return Response(json.dumps(result_dict), status=200, mimetype="application/json")
+
+
+def process_instance_task_list_without_task_data_for_me(
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ all_tasks: bool = False,
+ spiff_step: int = 0,
+) -> flask.wrappers.Response:
+ """Process_instance_task_list_without_task_data_for_me."""
+ process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
+ return process_instance_task_list(
+ modified_process_model_identifier,
+ process_instance,
+ all_tasks,
+ spiff_step,
+ get_task_data=False,
+ )
+
+
+def process_instance_task_list_without_task_data(
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ all_tasks: bool = False,
+ spiff_step: int = 0,
+) -> flask.wrappers.Response:
+ """Process_instance_task_list_without_task_data."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ return process_instance_task_list(
+ modified_process_model_identifier,
+ process_instance,
+ all_tasks,
+ spiff_step,
+ get_task_data=False,
+ )
+
+
+def process_instance_task_list_with_task_data(
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ all_tasks: bool = False,
+ spiff_step: int = 0,
+) -> flask.wrappers.Response:
+ """Process_instance_task_list_with_task_data."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ return process_instance_task_list(
+ modified_process_model_identifier,
+ process_instance,
+ all_tasks,
+ spiff_step,
+ get_task_data=True,
+ )
+
+
+def process_instance_task_list(
+ _modified_process_model_identifier: str,
+ process_instance: ProcessInstanceModel,
+ all_tasks: bool = False,
+ spiff_step: int = 0,
+ get_task_data: bool = False,
+) -> flask.wrappers.Response:
+ """Process_instance_task_list."""
+ if spiff_step > 0:
+ step_detail = (
+ db.session.query(SpiffStepDetailsModel)
+ .filter(
+ SpiffStepDetailsModel.process_instance_id == process_instance.id,
+ SpiffStepDetailsModel.spiff_step == spiff_step,
+ )
+ .first()
+ )
+ if step_detail is not None and process_instance.bpmn_json is not None:
+ bpmn_json = json.loads(process_instance.bpmn_json)
+ bpmn_json["tasks"] = step_detail.task_json["tasks"]
+ bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
+ process_instance.bpmn_json = json.dumps(bpmn_json)
+
+ processor = ProcessInstanceProcessor(process_instance)
+
+ spiff_tasks = None
+ if all_tasks:
+ spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
+ else:
+ spiff_tasks = processor.get_all_user_tasks()
+
+ subprocesses_by_child_task_ids = processor.get_subprocesses_by_child_task_ids()
+ tasks = []
+ for spiff_task in spiff_tasks:
+ calling_subprocess_task_id = subprocesses_by_child_task_ids.get(
+ str(spiff_task.id), None
+ )
+ task = ProcessInstanceService.spiff_task_to_api_task(
+ processor, spiff_task, calling_subprocess_task_id=calling_subprocess_task_id
+ )
+ if get_task_data:
+ task.data = spiff_task.data
+ tasks.append(task)
+
+ return make_response(jsonify(tasks), 200)
+
+
+def process_instance_reset(
+ process_instance_id: int,
+ modified_process_model_identifier: str,
+ spiff_step: int = 0,
+) -> flask.wrappers.Response:
+ """Reset a process instance to a particular step."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.reset_process(spiff_step)
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_instance_find_by_id(
+    process_instance_id: int,
+) -> flask.wrappers.Response:
+    """Find a process instance by id and report which URI the user may use.
+
+    If the current user lacks "read" permission on the instance's canonical
+    URI, fall back to the "for-me" lookup (which raises if the user is not
+    associated with the instance) and set uri_type accordingly.
+    """
+    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+    modified_process_model_identifier = (
+        ProcessModelInfo.modify_process_identifier_for_path_param(
+            process_instance.process_model_identifier
+        )
+    )
+    process_instance_uri = (
+        f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
+    )
+    has_permission = AuthorizationService.user_has_permission(
+        user=g.user,
+        permission="read",
+        target_uri=process_instance_uri,
+    )
+
+    uri_type = None
+    if not has_permission:
+        # No direct read access: only allow it when the instance is "for me".
+        process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
+        uri_type = "for-me"
+
+    response_json = {
+        "process_instance": process_instance,
+        "uri_type": uri_type,
+    }
+    return make_response(jsonify(response_json), 200)
+
+
+def _get_process_instance(
+    modified_process_model_identifier: str,
+    process_instance: ProcessInstanceModel,
+    process_identifier: Optional[str] = None,
+) -> flask.wrappers.Response:
+    """Serialize a process instance, attaching the bpmn xml of its diagram.
+
+    The diagram file is read from the working tree when the instance was
+    started at the current git revision; otherwise it is fetched from the
+    revision recorded on the instance.
+    """
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    try:
+        current_version_control_revision = GitService.get_current_revision()
+    except GitCommandError:
+        # Not a git checkout (or git unavailable): treat revision as unknown.
+        current_version_control_revision = ""
+
+    process_model_with_diagram = None
+    name_of_file_with_diagram = None
+    if process_identifier:
+        # A specific (sub)process was requested: find its file via the cache.
+        spec_reference = SpecReferenceCache.query.filter_by(
+            identifier=process_identifier, type="process"
+        ).first()
+        if spec_reference is None:
+            raise SpecReferenceNotFoundError(
+                "Could not find given process identifier in the cache:"
+                f" {process_identifier}"
+            )
+
+        process_model_with_diagram = ProcessModelService.get_process_model(
+            spec_reference.process_model_id
+        )
+        name_of_file_with_diagram = spec_reference.file_name
+    else:
+        # Default to the primary file of the instance's own process model.
+        process_model_with_diagram = _get_process_model(process_model_identifier)
+        if process_model_with_diagram.primary_file_name:
+            name_of_file_with_diagram = process_model_with_diagram.primary_file_name
+
+    if process_model_with_diagram and name_of_file_with_diagram:
+        if (
+            process_instance.bpmn_version_control_identifier
+            == current_version_control_revision
+        ):
+            # Instance matches the working tree: read the file directly.
+            bpmn_xml_file_contents = SpecFileService.get_data(
+                process_model_with_diagram, name_of_file_with_diagram
+            ).decode("utf-8")
+        else:
+            # Instance was started at an older revision: read from that revision.
+            bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision(
+                process_model_with_diagram,
+                process_instance.bpmn_version_control_identifier,
+                file_name=name_of_file_with_diagram,
+            )
+        process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents
+
+    return make_response(jsonify(process_instance), 200)
+
+
+def _find_process_instance_for_me_or_raise(
+    process_instance_id: int,
+) -> ProcessInstanceModel:
+    """Return the instance if it is associated with the current user, else 400.
+
+    "Associated" means the current user either initiated the instance or is
+    assigned to one of its human tasks (the outer-joined HumanTaskUserModel
+    row is non-null only for that user's assignments).
+    """
+    process_instance: ProcessInstanceModel = (
+        ProcessInstanceModel.query.filter_by(id=process_instance_id)
+        .outerjoin(HumanTaskModel)
+        .outerjoin(
+            HumanTaskUserModel,
+            and_(
+                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+                HumanTaskUserModel.user_id == g.user.id,
+            ),
+        )
+        .filter(
+            or_(
+                HumanTaskUserModel.id.is_not(None),
+                ProcessInstanceModel.process_initiator_id == g.user.id,
+            )
+        )
+        .first()
+    )
+
+    if process_instance is None:
+        raise (
+            ApiError(
+                error_code="process_instance_cannot_be_found",
+                message=(
+                    f"Process instance with id {process_instance_id} cannot be found"
+                    " that is associated with you."
+                ),
+                status_code=400,
+            )
+        )
+
+    return process_instance
diff --git a/src/spiffworkflow_backend/routes/process_models_controller.py b/src/spiffworkflow_backend/routes/process_models_controller.py
new file mode 100644
index 000000000..1709357a7
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/process_models_controller.py
@@ -0,0 +1,496 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+import os
+import re
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import Union
+
+import connexion # type: ignore
+import flask.wrappers
+from flask import current_app
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+
+from spiffworkflow_backend.interfaces import IdToProcessGroupMapping
+from spiffworkflow_backend.models.file import FileSchema
+from spiffworkflow_backend.models.process_group import ProcessGroup
+from spiffworkflow_backend.models.process_instance_report import (
+ ProcessInstanceReportModel,
+)
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
+from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _un_modify_modified_process_model_id,
+)
+from spiffworkflow_backend.services.git_service import GitService
+from spiffworkflow_backend.services.git_service import MissingGitConfigsError
+from spiffworkflow_backend.services.process_instance_report_service import (
+ ProcessInstanceReportService,
+)
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.spec_file_service import SpecFileService
+
+
+def process_model_create(
+    modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
+) -> flask.wrappers.Response:
+    """Create a process model in the given group and push it to git.
+
+    Only whitelisted attributes from the request body are honored.
+    """
+    # Attributes callers are allowed to set at creation time.
+    body_include_list = [
+        "id",
+        "display_name",
+        "primary_file_name",
+        "primary_process_id",
+        "description",
+        "metadata_extraction_paths",
+    ]
+    body_filtered = {
+        include_item: body[include_item]
+        for include_item in body_include_list
+        if include_item in body
+    }
+
+    # Raises if the process group does not exist; the return value is unused.
+    _get_process_group_from_modified_identifier(modified_process_group_id)
+
+    process_model_info = ProcessModelInfo(**body_filtered) # type: ignore
+    # NOTE(review): a constructor call never returns None, so this check looks
+    # unreachable; kept as-is to avoid a behavior change.
+    if process_model_info is None:
+        raise ApiError(
+            error_code="process_model_could_not_be_created",
+            message=f"Process Model could not be created from given body: {body}",
+            status_code=400,
+        )
+
+    ProcessModelService.add_process_model(process_model_info)
+    _commit_and_push_to_git(
+        f"User: {g.user.username} created process model {process_model_info.id}"
+    )
+    return Response(
+        json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
+        status=201,
+        mimetype="application/json",
+    )
+
+
+def process_model_delete(
+ modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+ """Process_model_delete."""
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ ProcessModelService().process_model_delete(process_model_identifier)
+ _commit_and_push_to_git(
+ f"User: {g.user.username} deleted process model {process_model_identifier}"
+ )
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_model_update(
+ modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
+) -> Any:
+ """Process_model_update."""
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ body_include_list = [
+ "display_name",
+ "primary_file_name",
+ "primary_process_id",
+ "description",
+ "metadata_extraction_paths",
+ ]
+ body_filtered = {
+ include_item: body[include_item]
+ for include_item in body_include_list
+ if include_item in body
+ }
+
+ process_model = _get_process_model(process_model_identifier)
+ ProcessModelService.update_process_model(process_model, body_filtered)
+ _commit_and_push_to_git(
+ f"User: {g.user.username} updated process model {process_model_identifier}"
+ )
+ return ProcessModelInfoSchema().dump(process_model)
+
+
+def process_model_show(modified_process_model_identifier: str) -> Any:
+    """Return a process model with its files (primary file first) and parent groups."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+    # Primary file sorts first: "" orders before any non-empty sort_index.
+    # NOTE(review): assumes f.sort_index is a str — confirm, otherwise the
+    # mixed-type comparison with "" would raise.
+    files = sorted(
+        SpecFileService.get_files(process_model),
+        key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
+    )
+    process_model.files = files
+    for file in process_model.files:
+        file.references = SpecFileService.get_references_for_file(file, process_model)
+
+    process_model.parent_groups = ProcessModelService.get_parent_group_array(
+        process_model.id
+    )
+    return make_response(jsonify(process_model), 200)
+
+
+def process_model_move(
+ modified_process_model_identifier: str, new_location: str
+) -> flask.wrappers.Response:
+ """Process_model_move."""
+ original_process_model_id = _un_modify_modified_process_model_id(
+ modified_process_model_identifier
+ )
+ new_process_model = ProcessModelService().process_model_move(
+ original_process_model_id, new_location
+ )
+ _commit_and_push_to_git(
+ f"User: {g.user.username} moved process model {original_process_model_id} to"
+ f" {new_process_model.id}"
+ )
+ return make_response(jsonify(new_process_model), 200)
+
+
+def process_model_publish(
+ modified_process_model_identifier: str, branch_to_update: Optional[str] = None
+) -> flask.wrappers.Response:
+ """Process_model_publish."""
+ if branch_to_update is None:
+ branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
+ if branch_to_update is None:
+ raise MissingGitConfigsError(
+ "Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
+ "This is required for publishing process models"
+ )
+ process_model_identifier = _un_modify_modified_process_model_id(
+ modified_process_model_identifier
+ )
+ pr_url = GitService().publish(process_model_identifier, branch_to_update)
+ data = {"ok": True, "pr_url": pr_url}
+ return Response(json.dumps(data), status=200, mimetype="application/json")
+
+
+def process_model_list(
+    process_group_identifier: Optional[str] = None,
+    recursive: Optional[bool] = False,
+    filter_runnable_by_user: Optional[bool] = False,
+    include_parent_groups: Optional[bool] = False,
+    page: int = 1,
+    per_page: int = 100,
+) -> flask.wrappers.Response:
+    """Return a paginated list of process models, optionally with parent groups."""
+    process_models = ProcessModelService.get_process_models(
+        process_group_id=process_group_identifier,
+        recursive=recursive,
+        filter_runnable_by_user=filter_runnable_by_user,
+    )
+    process_models_to_return = ProcessModelService().get_batch(
+        process_models, page=page, per_page=per_page
+    )
+
+    if include_parent_groups:
+        # Share one cache across models so common ancestors are looked up once.
+        process_group_cache = IdToProcessGroupMapping({})
+        for process_model in process_models_to_return:
+            parent_group_lites_with_cache = (
+                ProcessModelService.get_parent_group_array_and_cache_it(
+                    process_model.id, process_group_cache
+                )
+            )
+            process_model.parent_groups = parent_group_lites_with_cache[
+                "process_groups"
+            ]
+
+    # Ceiling division: pages covers any partial final page.
+    pages = len(process_models) // per_page
+    remainder = len(process_models) % per_page
+    if remainder > 0:
+        pages += 1
+    response_json = {
+        "results": process_models_to_return,
+        "pagination": {
+            "count": len(process_models_to_return),
+            "total": len(process_models),
+            "pages": pages,
+        },
+    }
+    return make_response(jsonify(response_json), 200)
+
+
+def process_model_file_update(
+ modified_process_model_identifier: str, file_name: str
+) -> flask.wrappers.Response:
+ """Process_model_file_update."""
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ process_model = _get_process_model(process_model_identifier)
+
+ request_file = _get_file_from_request()
+ request_file_contents = request_file.stream.read()
+ if not request_file_contents:
+ raise ApiError(
+ error_code="file_contents_empty",
+ message="Given request file does not have any content",
+ status_code=400,
+ )
+
+ SpecFileService.update_file(process_model, file_name, request_file_contents)
+ _commit_and_push_to_git(
+ f"User: {g.user.username} clicked save for"
+ f" {process_model_identifier}/{file_name}"
+ )
+
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_model_file_delete(
+ modified_process_model_identifier: str, file_name: str
+) -> flask.wrappers.Response:
+ """Process_model_file_delete."""
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ process_model = _get_process_model(process_model_identifier)
+ try:
+ SpecFileService.delete_file(process_model, file_name)
+ except FileNotFoundError as exception:
+ raise (
+ ApiError(
+ error_code="process_model_file_cannot_be_found",
+ message=f"Process model file cannot be found: {file_name}",
+ status_code=400,
+ )
+ ) from exception
+
+ _commit_and_push_to_git(
+ f"User: {g.user.username} deleted process model file"
+ f" {process_model_identifier}/{file_name}"
+ )
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def process_model_file_create(
+ modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+ """Process_model_file_create."""
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ process_model = _get_process_model(process_model_identifier)
+ request_file = _get_file_from_request()
+ if not request_file.filename:
+ raise ApiError(
+ error_code="could_not_get_filename",
+ message="Could not get filename from request",
+ status_code=400,
+ )
+
+ file = SpecFileService.add_file(
+ process_model, request_file.filename, request_file.stream.read()
+ )
+ file_contents = SpecFileService.get_data(process_model, file.name)
+ file.file_contents = file_contents
+ file.process_model_id = process_model.id
+ _commit_and_push_to_git(
+ f"User: {g.user.username} added process model file"
+ f" {process_model_identifier}/{file.name}"
+ )
+ return Response(
+ json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
+ )
+
+
+def process_model_file_show(
+    modified_process_model_identifier: str, file_name: str
+) -> Any:
+    """Return one process model file, including its raw contents."""
+    process_model_identifier = modified_process_model_identifier.replace(":", "/")
+    process_model = _get_process_model(process_model_identifier)
+    files = SpecFileService.get_files(process_model, file_name)
+    if len(files) == 0:
+        # NOTE(review): error_code "unknown file" contains a space, unlike the
+        # snake_case codes used elsewhere; left unchanged in case clients match it.
+        raise ApiError(
+            error_code="unknown file",
+            message=(
+                f"No information exists for file {file_name}"
+                f" it does not exist in workflow {process_model_identifier}."
+            ),
+            status_code=404,
+        )
+
+    file = files[0]
+    file_contents = SpecFileService.get_data(process_model, file.name)
+    file.file_contents = file_contents
+    file.process_model_id = process_model.id
+    return FileSchema().dump(file)
+
+
+# {
+# "natural_language_text": "Create a bug tracker process model \
+# with a bug-details form that collects summary, description, and priority"
+# }
+def process_model_create_with_natural_language(
+ modified_process_group_id: str, body: Dict[str, str]
+) -> flask.wrappers.Response:
+ """Process_model_create_with_natural_language."""
+ pattern = re.compile(
+ r"Create a (?P.*?) process model with a (?P.*?) form that"
+ r" collects (?P.*)"
+ )
+ match = pattern.match(body["natural_language_text"])
+ if match is None:
+ raise ApiError(
+ error_code="natural_language_text_not_yet_supported",
+ message=(
+ "Natural language text is not yet supported. Please use the form:"
+ f" {pattern.pattern}"
+ ),
+ status_code=400,
+ )
+ process_model_display_name = match.group("pm_name")
+ process_model_identifier = re.sub(r"[ _]", "-", process_model_display_name)
+ process_model_identifier = re.sub(r"-{2,}", "-", process_model_identifier).lower()
+
+ form_name = match.group("form_name")
+ form_identifier = re.sub(r"[ _]", "-", form_name)
+ form_identifier = re.sub(r"-{2,}", "-", form_identifier).lower()
+
+ column_names = match.group("columns")
+ columns = re.sub(r"(, (and )?)", ",", column_names).split(",")
+
+ process_group = _get_process_group_from_modified_identifier(
+ modified_process_group_id
+ )
+ qualified_process_model_identifier = (
+ f"{process_group.id}/{process_model_identifier}"
+ )
+
+ metadata_extraction_paths = []
+ for column in columns:
+ metadata_extraction_paths.append({"key": column, "path": column})
+
+ process_model_attributes = {
+ "id": qualified_process_model_identifier,
+ "display_name": process_model_display_name,
+ "description": None,
+ "metadata_extraction_paths": metadata_extraction_paths,
+ }
+
+ process_model_info = ProcessModelInfo(**process_model_attributes) # type: ignore
+ if process_model_info is None:
+ raise ApiError(
+ error_code="process_model_could_not_be_created",
+ message=f"Process Model could not be created from given body: {body}",
+ status_code=400,
+ )
+
+ bpmn_template_file = os.path.join(
+ current_app.root_path, "templates", "basic_with_user_task_template.bpmn"
+ )
+ if not os.path.exists(bpmn_template_file):
+ raise ApiError(
+ error_code="bpmn_template_file_does_not_exist",
+ message="Could not find the bpmn template file to create process model.",
+ status_code=500,
+ )
+
+ ProcessModelService.add_process_model(process_model_info)
+ bpmn_process_identifier = f"{process_model_identifier}_process"
+ bpmn_template_contents = ""
+ with open(bpmn_template_file, encoding="utf-8") as f:
+ bpmn_template_contents = f.read()
+
+ bpmn_template_contents = bpmn_template_contents.replace(
+ "natural_language_process_id_template", bpmn_process_identifier
+ )
+ bpmn_template_contents = bpmn_template_contents.replace(
+ "form-identifier-id-template", form_identifier
+ )
+
+ form_uischema_json: dict = {"ui:order": columns}
+
+ form_properties: dict = {}
+ for column in columns:
+ form_properties[column] = {
+ "type": "string",
+ "title": column,
+ }
+ form_schema_json = {
+ "title": form_identifier,
+ "description": "",
+ "properties": form_properties,
+ "required": [],
+ }
+
+ SpecFileService.add_file(
+ process_model_info,
+ f"{process_model_identifier}.bpmn",
+ str.encode(bpmn_template_contents),
+ )
+ SpecFileService.add_file(
+ process_model_info,
+ f"{form_identifier}-schema.json",
+ str.encode(json.dumps(form_schema_json)),
+ )
+ SpecFileService.add_file(
+ process_model_info,
+ f"{form_identifier}-uischema.json",
+ str.encode(json.dumps(form_uischema_json)),
+ )
+
+ _commit_and_push_to_git(
+ f"User: {g.user.username} created process model via natural language:"
+ f" {process_model_info.id}"
+ )
+
+ default_report_metadata = ProcessInstanceReportService.system_metadata_map(
+ "default"
+ )
+ for column in columns:
+ default_report_metadata["columns"].append(
+ {"Header": column, "accessor": column, "filterable": True}
+ )
+ ProcessInstanceReportModel.create_report(
+ identifier=process_model_identifier,
+ user=g.user,
+ report_metadata=default_report_metadata,
+ )
+
+ return Response(
+ json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
+ status=201,
+ mimetype="application/json",
+ )
+
+
+def _get_file_from_request() -> Any:
+ """Get_file_from_request."""
+ request_file = connexion.request.files.get("file")
+ if not request_file:
+ raise ApiError(
+ error_code="no_file_given",
+ message="Given request does not contain a file",
+ status_code=400,
+ )
+ return request_file
+
+
+def _get_process_group_from_modified_identifier(
+ modified_process_group_id: str,
+) -> ProcessGroup:
+ """_get_process_group_from_modified_identifier."""
+ if modified_process_group_id is None:
+ raise ApiError(
+ error_code="process_group_id_not_specified",
+ message=(
+ "Process Model could not be created when process_group_id path param is"
+ " unspecified"
+ ),
+ status_code=400,
+ )
+
+ unmodified_process_group_id = _un_modify_modified_process_model_id(
+ modified_process_group_id
+ )
+ process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
+ if process_group is None:
+ raise ApiError(
+ error_code="process_model_could_not_be_created",
+ message=(
+ "Process Model could not be created from given body because Process"
+ f" Group could not be found: {unmodified_process_group_id}"
+ ),
+ status_code=400,
+ )
+ return process_group
diff --git a/src/spiffworkflow_backend/routes/script_unit_tests_controller.py b/src/spiffworkflow_backend/routes/script_unit_tests_controller.py
new file mode 100644
index 000000000..e97b26ae6
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/script_unit_tests_controller.py
@@ -0,0 +1,134 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+import random
+import string
+from typing import Dict
+from typing import Union
+
+import flask.wrappers
+from flask import current_app
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+from lxml import etree # type: ignore
+from lxml.builder import ElementMaker # type: ignore
+
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _get_required_parameter_or_raise,
+)
+from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
+from spiffworkflow_backend.services.spec_file_service import SpecFileService
+
+
+def script_unit_test_create(
+ modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
+) -> flask.wrappers.Response:
+ """Script_unit_test_create."""
+ bpmn_task_identifier = _get_required_parameter_or_raise(
+ "bpmn_task_identifier", body
+ )
+ input_json = _get_required_parameter_or_raise("input_json", body)
+ expected_output_json = _get_required_parameter_or_raise(
+ "expected_output_json", body
+ )
+
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ process_model = _get_process_model(process_model_identifier)
+ file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
+ if file is None:
+ raise ApiError(
+ error_code="cannot_find_file",
+ message=(
+ "Could not find the primary bpmn file for process_model:"
+ f" {process_model.id}"
+ ),
+ status_code=404,
+ )
+
+ # TODO: move this to an xml service or something
+ file_contents = SpecFileService.get_data(process_model, file.name)
+ bpmn_etree_element = etree.fromstring(file_contents)
+
+ nsmap = bpmn_etree_element.nsmap
+ spiff_element_maker = ElementMaker(
+ namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
+ )
+
+ script_task_elements = bpmn_etree_element.xpath(
+ f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",
+ namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
+ )
+ if len(script_task_elements) == 0:
+ raise ApiError(
+ error_code="missing_script_task",
+ message=f"Cannot find a script task with id: {bpmn_task_identifier}",
+ status_code=404,
+ )
+ script_task_element = script_task_elements[0]
+
+ extension_elements = None
+ extension_elements_array = script_task_element.xpath(
+ ".//bpmn:extensionElements",
+ namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
+ )
+ if len(extension_elements_array) == 0:
+ bpmn_element_maker = ElementMaker(
+ namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
+ )
+ extension_elements = bpmn_element_maker("extensionElements")
+ script_task_element.append(extension_elements)
+ else:
+ extension_elements = extension_elements_array[0]
+
+ unit_test_elements = None
+ unit_test_elements_array = extension_elements.xpath(
+ "//spiffworkflow:unitTests",
+ namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"},
+ )
+ if len(unit_test_elements_array) == 0:
+ unit_test_elements = spiff_element_maker("unitTests")
+ extension_elements.append(unit_test_elements)
+ else:
+ unit_test_elements = unit_test_elements_array[0]
+
+ fuzz = "".join(
+ random.choice(string.ascii_uppercase + string.digits) # noqa: S311
+ for _ in range(7)
+ )
+ unit_test_id = f"unit_test_{fuzz}"
+
+ input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
+ expected_output_json_element = spiff_element_maker(
+ "expectedOutputJson", json.dumps(expected_output_json)
+ )
+ unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
+ unit_test_element.append(input_json_element)
+ unit_test_element.append(expected_output_json_element)
+ unit_test_elements.append(unit_test_element)
+ SpecFileService.update_file(
+ process_model, file.name, etree.tostring(bpmn_etree_element)
+ )
+
+ return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
+
+
+def script_unit_test_run(
+    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
+) -> flask.wrappers.Response:
+    """Run a python script against input json and compare to the expected output."""
+    # FIXME: We should probably clear this somewhere else but this works
+    current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
+    current_app.config["THREAD_LOCAL_DATA"].spiff_step = None
+
+    python_script = _get_required_parameter_or_raise("python_script", body)
+    input_json = _get_required_parameter_or_raise("input_json", body)
+    expected_output_json = _get_required_parameter_or_raise(
+        "expected_output_json", body
+    )
+
+    result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
+        python_script, input_json, expected_output_json
+    )
+    return make_response(jsonify(result), 200)
diff --git a/src/spiffworkflow_backend/routes/secrets_controller.py b/src/spiffworkflow_backend/routes/secrets_controller.py
new file mode 100644
index 000000000..fdf4c7fae
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/secrets_controller.py
@@ -0,0 +1,67 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+from typing import Dict
+from typing import Optional
+
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+
+from spiffworkflow_backend.models.secret_model import SecretModel
+from spiffworkflow_backend.models.secret_model import SecretModelSchema
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.secret_service import SecretService
+from spiffworkflow_backend.services.user_service import UserService
+
+
+def secret_show(key: str) -> Optional[str]:
+ """Secret_show."""
+ return SecretService.get_secret(key)
+
+
+def secret_list(
+ page: int = 1,
+ per_page: int = 100,
+) -> Response:
+ """Secret_list."""
+ secrets = (
+ SecretModel.query.order_by(SecretModel.key)
+ .join(UserModel)
+ .add_columns(
+ UserModel.username,
+ )
+ .paginate(page=page, per_page=per_page, error_out=False)
+ )
+ response_json = {
+ "results": secrets.items,
+ "pagination": {
+ "count": len(secrets.items),
+ "total": secrets.total,
+ "pages": secrets.pages,
+ },
+ }
+ return make_response(jsonify(response_json), 200)
+
+
+def secret_create(body: Dict) -> Response:
+ """Add secret."""
+ secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
+ return Response(
+ json.dumps(SecretModelSchema().dump(secret_model)),
+ status=201,
+ mimetype="application/json",
+ )
+
+
+def secret_update(key: str, body: dict) -> Response:
+ """Update secret."""
+ SecretService().update_secret(key, body["value"], g.user.id)
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+def secret_delete(key: str) -> Response:
+ """Delete secret."""
+ current_user = UserService.current_user()
+ SecretService.delete_secret(key, current_user.id)
+ return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
diff --git a/src/spiffworkflow_backend/routes/service_tasks_controller.py b/src/spiffworkflow_backend/routes/service_tasks_controller.py
new file mode 100644
index 000000000..a1708ce8d
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/service_tasks_controller.py
@@ -0,0 +1,49 @@
+"""APIs for dealing with process groups, process models, and process instances."""
+import json
+
+import flask.wrappers
+import werkzeug
+from flask import current_app
+from flask import g
+from flask import redirect
+from flask import request
+from flask.wrappers import Response
+
+from spiffworkflow_backend.routes.user import verify_token
+from spiffworkflow_backend.services.secret_service import SecretService
+from spiffworkflow_backend.services.service_task_service import ServiceTaskService
+
+
+def service_task_list() -> flask.wrappers.Response:
+ """Service_task_list."""
+ available_connectors = ServiceTaskService.available_connectors()
+ return Response(
+ json.dumps(available_connectors), status=200, mimetype="application/json"
+ )
+
+
+def authentication_list() -> flask.wrappers.Response:
+ """Authentication_list."""
+ available_authentications = ServiceTaskService.authentication_list()
+ response_json = {
+ "results": available_authentications,
+ "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"],
+ "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
+ }
+
+ return Response(json.dumps(response_json), status=200, mimetype="application/json")
+
+
+def authentication_callback(
+    service: str,
+    auth_method: str,
+) -> werkzeug.wrappers.Response:
+    """Store an auth response from the connector proxy and redirect to the UI.
+
+    The "token" query param is verified first; the "response" payload is then
+    saved as the secret named "{service}/{auth_method}".
+    """
+    # force_run presumably bypasses any cached-auth short-circuit — confirm
+    # against verify_token's implementation.
+    verify_token(request.args.get("token"), force_run=True)
+    response = request.args["response"]
+    SecretService().update_secret(
+        f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
+    )
+    return redirect(
+        f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration"
+    )
diff --git a/src/spiffworkflow_backend/routes/tasks_controller.py b/src/spiffworkflow_backend/routes/tasks_controller.py
new file mode 100644
index 000000000..a7d3bf869
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -0,0 +1,563 @@
+"""APIs for dealing with tasks."""
+import json
+import os
+import uuid
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import TypedDict
+from typing import Union
+
+import flask.wrappers
+import jinja2
+from flask import g
+from flask import jsonify
+from flask import make_response
+from flask.wrappers import Response
+from flask_bpmn.api.api_error import ApiError
+from flask_bpmn.models.db import db
+from SpiffWorkflow.task import Task as SpiffTask # type: ignore
+from SpiffWorkflow.task import TaskState
+from sqlalchemy import and_
+from sqlalchemy import asc
+from sqlalchemy import desc
+from sqlalchemy import func
+from sqlalchemy.orm import aliased
+
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _find_principal_or_raise,
+)
+from spiffworkflow_backend.routes.process_api_blueprint import (
+ _find_process_instance_by_id_or_raise,
+)
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.file_system_service import FileSystemService
+from spiffworkflow_backend.services.process_instance_processor import (
+ ProcessInstanceProcessor,
+)
+from spiffworkflow_backend.services.process_instance_service import (
+ ProcessInstanceService,
+)
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.spec_file_service import SpecFileService
+
+
+class TaskDataSelectOption(TypedDict):
+ """TaskDataSelectOption."""
+
+ value: str
+ label: str
+
+
+class ReactJsonSchemaSelectOption(TypedDict):
+ """ReactJsonSchemaSelectOption."""
+
+ type: str
+ title: str
+ enum: list[str]
+
+
+# TODO: see comment for before_request
+# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
+def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
+ """Task_list_my_tasks."""
+ principal = _find_principal_or_raise()
+ human_tasks = (
+ HumanTaskModel.query.order_by(desc(HumanTaskModel.id)) # type: ignore
+ .join(ProcessInstanceModel)
+ .join(HumanTaskUserModel)
+ .filter_by(user_id=principal.user_id)
+ .filter(HumanTaskModel.completed == False) # noqa: E712
+ # just need this add_columns to add the process_model_identifier. Then add everything back that was removed.
+ .add_columns(
+ ProcessInstanceModel.process_model_identifier,
+ ProcessInstanceModel.process_model_display_name,
+ ProcessInstanceModel.status,
+ HumanTaskModel.task_name,
+ HumanTaskModel.task_title,
+ HumanTaskModel.task_type,
+ HumanTaskModel.task_status,
+ HumanTaskModel.task_id,
+ HumanTaskModel.id,
+ HumanTaskModel.process_model_display_name,
+ HumanTaskModel.process_instance_id,
+ )
+ .paginate(page=page, per_page=per_page, error_out=False)
+ )
+ tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items]
+
+ response_json = {
+ "results": tasks,
+ "pagination": {
+ "count": len(human_tasks.items),
+ "total": human_tasks.total,
+ "pages": human_tasks.pages,
+ },
+ }
+
+ return make_response(jsonify(response_json), 200)
+
+
+def task_list_for_my_open_processes(
+ page: int = 1, per_page: int = 100
+) -> flask.wrappers.Response:
+ """Task_list_for_my_open_processes."""
+ return _get_tasks(page=page, per_page=per_page)
+
+
+def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
+ """Task_list_for_me."""
+ return _get_tasks(
+ processes_started_by_user=False,
+ has_lane_assignment_id=False,
+ page=page,
+ per_page=per_page,
+ )
+
+
+def task_list_for_my_groups(
+ user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
+) -> flask.wrappers.Response:
+ """Task_list_for_my_groups."""
+ return _get_tasks(
+ user_group_identifier=user_group_identifier,
+ processes_started_by_user=False,
+ page=page,
+ per_page=per_page,
+ )
+
+
+def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
+ """Task_show."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+
+ if process_instance.status == ProcessInstanceStatus.suspended.value:
+ raise ApiError(
+ error_code="error_suspended",
+ message="The process instance is suspended",
+ status_code=400,
+ )
+
+ process_model = _get_process_model(
+ process_instance.process_model_identifier,
+ )
+
+ human_task = HumanTaskModel.query.filter_by(
+ process_instance_id=process_instance_id, task_id=task_id
+ ).first()
+ if human_task is None:
+ raise (
+ ApiError(
+ error_code="no_human_task",
+ message=(
+ f"Cannot find a task to complete for task id '{task_id}' and"
+ f" process instance {process_instance_id}."
+ ),
+ status_code=500,
+ )
+ )
+
+ form_schema_file_name = ""
+ form_ui_schema_file_name = ""
+ spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance)
+ extensions = spiff_task.task_spec.extensions
+
+ if "properties" in extensions:
+ properties = extensions["properties"]
+ if "formJsonSchemaFilename" in properties:
+ form_schema_file_name = properties["formJsonSchemaFilename"]
+ if "formUiSchemaFilename" in properties:
+ form_ui_schema_file_name = properties["formUiSchemaFilename"]
+ processor = ProcessInstanceProcessor(process_instance)
+ task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
+ task.data = spiff_task.data
+ task.process_model_display_name = process_model.display_name
+ task.process_model_identifier = process_model.id
+
+ process_model_with_form = process_model
+ refs = SpecFileService.get_references_for_process(process_model_with_form)
+ all_processes = [i.identifier for i in refs]
+ if task.process_identifier not in all_processes:
+ bpmn_file_full_path = (
+ ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
+ task.process_identifier
+ )
+ )
+ relative_path = os.path.relpath(
+ bpmn_file_full_path, start=FileSystemService.root_path()
+ )
+ process_model_relative_path = os.path.dirname(relative_path)
+ process_model_with_form = (
+ ProcessModelService.get_process_model_from_relative_path(
+ process_model_relative_path
+ )
+ )
+
+ if task.type == "User Task":
+ if not form_schema_file_name:
+ raise (
+ ApiError(
+ error_code="missing_form_file",
+ message=(
+ "Cannot find a form file for process_instance_id:"
+ f" {process_instance_id}, task_id: {task_id}"
+ ),
+ status_code=400,
+ )
+ )
+
+ form_contents = _prepare_form_data(
+ form_schema_file_name,
+ task.data,
+ process_model_with_form,
+ )
+
+ try:
+ # form_contents is a str
+ form_dict = json.loads(form_contents)
+ except Exception as exception:
+ raise (
+ ApiError(
+ error_code="error_loading_form",
+ message=(
+ f"Could not load form schema from: {form_schema_file_name}."
+ f" Error was: {str(exception)}"
+ ),
+ status_code=400,
+ )
+ ) from exception
+
+ if task.data:
+ _update_form_schema_with_task_data_as_needed(form_dict, task.data)
+
+ if form_contents:
+ task.form_schema = form_dict
+
+ if form_ui_schema_file_name:
+ ui_form_contents = _prepare_form_data(
+ form_ui_schema_file_name,
+ task.data,
+ process_model_with_form,
+ )
+ if ui_form_contents:
+ task.form_ui_schema = ui_form_contents
+
+ if task.properties and task.data and "instructionsForEndUser" in task.properties:
+ if task.properties["instructionsForEndUser"]:
+ task.properties["instructionsForEndUser"] = _render_jinja_template(
+ task.properties["instructionsForEndUser"], task.data
+ )
+ return make_response(jsonify(task), 200)
+
+
+def process_data_show(
+ process_instance_id: int,
+ process_data_identifier: str,
+ modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+ """Process_data_show."""
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ processor = ProcessInstanceProcessor(process_instance)
+ all_process_data = processor.get_data()
+ process_data_value = None
+ if process_data_identifier in all_process_data:
+ process_data_value = all_process_data[process_data_identifier]
+
+ return make_response(
+ jsonify(
+ {
+ "process_data_identifier": process_data_identifier,
+ "process_data_value": process_data_value,
+ }
+ ),
+ 200,
+ )
+
+
+def task_submit(
+ process_instance_id: int,
+ task_id: str,
+ body: Dict[str, Any],
+ terminate_loop: bool = False,
+) -> flask.wrappers.Response:
+    """Task_submit."""
+ principal = _find_principal_or_raise()
+ process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+ if not process_instance.can_submit_task():
+ raise ApiError(
+ error_code="process_instance_not_runnable",
+ message=(
+ f"Process Instance ({process_instance.id}) has status "
+ f"{process_instance.status} which does not allow tasks to be submitted."
+ ),
+ status_code=400,
+ )
+
+ processor = ProcessInstanceProcessor(process_instance)
+ spiff_task = _get_spiff_task_from_process_instance(
+ task_id, process_instance, processor=processor
+ )
+ AuthorizationService.assert_user_can_complete_spiff_task(
+ process_instance.id, spiff_task, principal.user
+ )
+
+ if spiff_task.state != TaskState.READY:
+ raise (
+ ApiError(
+ error_code="invalid_state",
+ message="You may not update a task unless it is in the READY state.",
+ status_code=400,
+ )
+ )
+
+ if terminate_loop and spiff_task.is_looping():
+ spiff_task.terminate_loop()
+
+ human_task = HumanTaskModel.query.filter_by(
+ process_instance_id=process_instance_id, task_id=task_id, completed=False
+ ).first()
+ if human_task is None:
+ raise (
+ ApiError(
+ error_code="no_human_task",
+ message=(
+ f"Cannot find a task to complete for task id '{task_id}' and"
+ f" process instance {process_instance_id}."
+ ),
+ status_code=500,
+ )
+ )
+
+ ProcessInstanceService.complete_form_task(
+ processor=processor,
+ spiff_task=spiff_task,
+ data=body,
+ user=g.user,
+ human_task=human_task,
+ )
+
+    # If we need to update all tasks, then get the next ready task and if it is a multi-instance with the same
+ # task spec, complete that form as well.
+ # if update_all:
+ # last_index = spiff_task.task_info()["mi_index"]
+ # next_task = processor.next_task()
+ # while next_task and next_task.task_info()["mi_index"] > last_index:
+ # __update_task(processor, next_task, form_data, user)
+ # last_index = next_task.task_info()["mi_index"]
+ # next_task = processor.next_task()
+
+ next_human_task_assigned_to_me = (
+ HumanTaskModel.query.filter_by(
+ process_instance_id=process_instance_id, completed=False
+ )
+ .order_by(asc(HumanTaskModel.id)) # type: ignore
+ .join(HumanTaskUserModel)
+ .filter_by(user_id=principal.user_id)
+ .first()
+ )
+ if next_human_task_assigned_to_me:
+ return make_response(
+ jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200
+ )
+
+ return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
+
+
+def _get_tasks(
+ processes_started_by_user: bool = True,
+ has_lane_assignment_id: bool = True,
+ page: int = 1,
+ per_page: int = 100,
+ user_group_identifier: Optional[str] = None,
+) -> flask.wrappers.Response:
+ """Get_tasks."""
+ user_id = g.user.id
+
+ # use distinct to ensure we only get one row per human task otherwise
+ # we can get back multiple for the same human task row which throws off
+ # pagination later on
+ # https://stackoverflow.com/q/34582014/6090676
+ human_tasks_query = (
+ db.session.query(HumanTaskModel)
+ .group_by(HumanTaskModel.id) # type: ignore
+ .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
+ .join(ProcessInstanceModel)
+ .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
+ .filter(HumanTaskModel.completed == False) # noqa: E712
+ )
+
+ assigned_user = aliased(UserModel)
+ if processes_started_by_user:
+ human_tasks_query = (
+ human_tasks_query.filter(
+ ProcessInstanceModel.process_initiator_id == user_id
+ )
+ .outerjoin(
+ HumanTaskUserModel,
+ HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+ )
+ .outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
+ )
+ else:
+ human_tasks_query = human_tasks_query.filter(
+ ProcessInstanceModel.process_initiator_id != user_id
+ ).join(
+ HumanTaskUserModel,
+ and_(
+ HumanTaskUserModel.user_id == user_id,
+ HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+ ),
+ )
+ if has_lane_assignment_id:
+ if user_group_identifier:
+ human_tasks_query = human_tasks_query.filter(
+ GroupModel.identifier == user_group_identifier
+ )
+ else:
+ human_tasks_query = human_tasks_query.filter(
+ HumanTaskModel.lane_assignment_id.is_not(None) # type: ignore
+ )
+ else:
+ human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore
+
+ human_tasks = (
+ human_tasks_query.add_columns(
+ ProcessInstanceModel.process_model_identifier,
+ ProcessInstanceModel.status.label("process_instance_status"), # type: ignore
+ ProcessInstanceModel.updated_at_in_seconds,
+ ProcessInstanceModel.created_at_in_seconds,
+ UserModel.username.label("process_initiator_username"), # type: ignore
+ GroupModel.identifier.label("assigned_user_group_identifier"),
+ HumanTaskModel.task_name,
+ HumanTaskModel.task_title,
+ HumanTaskModel.process_model_display_name,
+ HumanTaskModel.process_instance_id,
+ func.group_concat(assigned_user.username.distinct()).label(
+ "potential_owner_usernames"
+ ),
+ )
+ .order_by(desc(HumanTaskModel.id)) # type: ignore
+ .paginate(page=page, per_page=per_page, error_out=False)
+ )
+
+ response_json = {
+ "results": human_tasks.items,
+ "pagination": {
+ "count": len(human_tasks.items),
+ "total": human_tasks.total,
+ "pages": human_tasks.pages,
+ },
+ }
+
+ return make_response(jsonify(response_json), 200)
+
+
+def _prepare_form_data(
+ form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo
+) -> str:
+ """Prepare_form_data."""
+ if task_data is None:
+ return ""
+
+ file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
+ return _render_jinja_template(file_contents, task_data)
+
+
+def _render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str:
+ """Render_jinja_template."""
+ jinja_environment = jinja2.Environment(
+ autoescape=True, lstrip_blocks=True, trim_blocks=True
+ )
+ template = jinja_environment.from_string(unprocessed_template)
+ return template.render(**data)
+
+
+def _get_spiff_task_from_process_instance(
+ task_id: str,
+ process_instance: ProcessInstanceModel,
+ processor: Union[ProcessInstanceProcessor, None] = None,
+) -> SpiffTask:
+ """Get_spiff_task_from_process_instance."""
+ if processor is None:
+ processor = ProcessInstanceProcessor(process_instance)
+ task_uuid = uuid.UUID(task_id)
+ spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
+
+ if spiff_task is None:
+ raise (
+ ApiError(
+ error_code="empty_task",
+ message="Processor failed to obtain task.",
+ status_code=500,
+ )
+ )
+ return spiff_task
+
+
+# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
+def _update_form_schema_with_task_data_as_needed(
+ in_dict: dict, task_data: dict
+) -> None:
+ """Update_nested."""
+ for k, value in in_dict.items():
+ if "anyOf" == k:
+ # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"]
+ if isinstance(value, list):
+ if len(value) == 1:
+ first_element_in_value_list = value[0]
+ if isinstance(first_element_in_value_list, str):
+ if first_element_in_value_list.startswith(
+ "options_from_task_data_var:"
+ ):
+ task_data_var = first_element_in_value_list.replace(
+ "options_from_task_data_var:", ""
+ )
+
+ if task_data_var not in task_data:
+ raise (
+ ApiError(
+ error_code="missing_task_data_var",
+ message=(
+ "Task data is missing variable:"
+ f" {task_data_var}"
+ ),
+ status_code=500,
+ )
+ )
+
+ select_options_from_task_data = task_data.get(task_data_var)
+ if isinstance(select_options_from_task_data, list):
+ if all(
+ "value" in d and "label" in d
+ for d in select_options_from_task_data
+ ):
+
+ def map_function(
+ task_data_select_option: TaskDataSelectOption,
+ ) -> ReactJsonSchemaSelectOption:
+ """Map_function."""
+ return {
+ "type": "string",
+ "enum": [task_data_select_option["value"]],
+ "title": task_data_select_option["label"],
+ }
+
+ options_for_react_json_schema_form = list(
+ map(map_function, select_options_from_task_data)
+ )
+
+ in_dict[k] = options_for_react_json_schema_form
+ elif isinstance(value, dict):
+ _update_form_schema_with_task_data_as_needed(value, task_data)
+ elif isinstance(value, list):
+ for o in value:
+ if isinstance(o, dict):
+ _update_form_schema_with_task_data_as_needed(o, task_data)
diff --git a/src/spiffworkflow_backend/routes/user.py b/src/spiffworkflow_backend/routes/user.py
index ad98fbbc6..1ac6207c0 100644
--- a/src/spiffworkflow_backend/routes/user.py
+++ b/src/spiffworkflow_backend/routes/user.py
@@ -67,16 +67,19 @@ def verify_token(
user_model = get_user_from_decoded_internal_token(decoded_token)
except Exception as e:
current_app.logger.error(
- f"Exception in verify_token getting user from decoded internal token. {e}"
+ "Exception in verify_token getting user from decoded"
+ f" internal token. {e}"
)
elif "iss" in decoded_token.keys():
try:
if AuthenticationService.validate_id_token(token):
user_info = decoded_token
- except ApiError as ae: # API Error is only thrown in the token is outdated.
+ except (
+ ApiError
+            ) as ae:  # API Error is only thrown if the token is outdated.
# Try to refresh the token
user = UserService.get_user_by_service_and_service_id(
- "open_id", decoded_token["sub"]
+ decoded_token["iss"], decoded_token["sub"]
)
if user:
refresh_token = AuthenticationService.get_refresh_token(user.id)
@@ -105,10 +108,12 @@ def verify_token(
) from e
if (
- user_info is not None and "error" not in user_info
+ user_info is not None
+ and "error" not in user_info
+ and "iss" in user_info
): # not sure what to test yet
user_model = (
- UserModel.query.filter(UserModel.service == "open_id")
+ UserModel.query.filter(UserModel.service == user_info["iss"])
.filter(UserModel.service_id == user_info["sub"])
.first()
)
@@ -293,7 +298,6 @@ def get_decoded_token(token: str) -> Optional[Dict]:
try:
decoded_token = jwt.decode(token, options={"verify_signature": False})
except Exception as e:
- print(f"Exception in get_token_type: {e}")
raise ApiError(
error_code="invalid_token", message="Cannot decode token."
) from e
@@ -341,9 +345,5 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
)
if user:
return user
- user = UserModel(
- username=service_id,
- service=service,
- service_id=service_id,
- )
+ user = UserService.create_user(service_id, service, service_id)
return user
diff --git a/src/spiffworkflow_backend/routes/user_blueprint.py b/src/spiffworkflow_backend/routes/user_blueprint.py
index 29bbddcd1..fd5c1ae90 100644
--- a/src/spiffworkflow_backend/routes/user_blueprint.py
+++ b/src/spiffworkflow_backend/routes/user_blueprint.py
@@ -26,6 +26,7 @@ user_blueprint = Blueprint("main", __name__)
# user = UserService.create_user('internal', username)
# return Response(json.dumps({"id": user.id}), status=201, mimetype=APPLICATION_JSON)
+
# def _create_user(username):
# user = UserModel.query.filter_by(username=username).first()
# if user is not None:
diff --git a/src/spiffworkflow_backend/routes/users_controller.py b/src/spiffworkflow_backend/routes/users_controller.py
new file mode 100644
index 000000000..5dce5b43e
--- /dev/null
+++ b/src/spiffworkflow_backend/routes/users_controller.py
@@ -0,0 +1,26 @@
+"""Users_controller."""
+import flask
+from flask import g
+from flask import jsonify
+from flask import make_response
+
+from spiffworkflow_backend.models.user import UserModel
+
+
+def user_search(username_prefix: str) -> flask.wrappers.Response:
+ """User_search."""
+ found_users = UserModel.query.filter(UserModel.username.like(f"{username_prefix}%")).all() # type: ignore
+
+ response_json = {
+ "users": found_users,
+ "username_prefix": username_prefix,
+ }
+ return make_response(jsonify(response_json), 200)
+
+
+def user_group_list_for_current_user() -> flask.wrappers.Response:
+ """User_group_list_for_current_user."""
+ groups = g.user.groups
+ # TODO: filter out the default group and have a way to know what is the default group
+ group_identifiers = [i.identifier for i in groups if i.identifier != "everybody"]
+ return make_response(jsonify(sorted(group_identifiers)), 200)
diff --git a/src/spiffworkflow_backend/scripts/add_user_to_group.py b/src/spiffworkflow_backend/scripts/add_user_to_group.py
deleted file mode 100644
index d3c777118..000000000
--- a/src/spiffworkflow_backend/scripts/add_user_to_group.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""Get_env."""
-from typing import Any
-
-from spiffworkflow_backend.models.group import GroupModel
-from spiffworkflow_backend.models.group import GroupNotFoundError
-from spiffworkflow_backend.models.script_attributes_context import (
- ScriptAttributesContext,
-)
-from spiffworkflow_backend.models.user import UserModel
-from spiffworkflow_backend.models.user import UserNotFoundError
-from spiffworkflow_backend.scripts.script import Script
-from spiffworkflow_backend.services.user_service import UserService
-
-
-class AddUserToGroup(Script):
- """AddUserToGroup."""
-
- def get_description(self) -> str:
- """Get_description."""
- return """Add a given user to a given group."""
-
- def run(
- self,
- script_attributes_context: ScriptAttributesContext,
- *args: Any,
- **kwargs: Any,
- ) -> Any:
- """Run."""
- username = args[0]
- group_identifier = args[1]
- user = UserModel.query.filter_by(username=username).first()
- if user is None:
- raise UserNotFoundError(
- f"Script 'add_user_to_group' could not find a user with username: {username}"
- )
-
- group = GroupModel.query.filter_by(identifier=group_identifier).first()
- if group is None:
- raise GroupNotFoundError(
- f"Script 'add_user_to_group' could not find group with identifier '{group_identifier}'."
- )
-
- UserService.add_user_to_group(user, group)
diff --git a/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py b/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py
new file mode 100644
index 000000000..5b4225253
--- /dev/null
+++ b/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py
@@ -0,0 +1,63 @@
+"""Delete_process_instances_with_criteria."""
+from time import time
+from typing import Any
+
+from flask_bpmn.models.db import db
+from sqlalchemy import or_
+
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.script_attributes_context import (
+ ScriptAttributesContext,
+)
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
+from spiffworkflow_backend.scripts.script import Script
+
+
+class DeleteProcessInstancesWithCriteria(Script):
+ """DeleteProcessInstancesWithCriteria."""
+
+ def get_description(self) -> str:
+ """Get_description."""
+        return "Delete process instances that match the provided criteria."
+
+ def run(
+ self,
+ script_attributes_context: ScriptAttributesContext,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Any:
+ """Run."""
+ criteria_list = args[0]
+
+ delete_criteria = []
+ delete_time = time()
+
+ for criteria in criteria_list:
+ delete_criteria.append(
+ (ProcessInstanceModel.process_model_identifier == criteria["name"])
+ & ProcessInstanceModel.status.in_(criteria["status"]) # type: ignore
+ & (
+ ProcessInstanceModel.updated_at_in_seconds
+ < (delete_time - criteria["last_updated_delta"])
+ )
+ )
+
+ results = (
+ ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
+ )
+ rows_affected = len(results)
+
+ if rows_affected > 0:
+ ids_to_delete = list(map(lambda r: r.id, results)) # type: ignore
+
+ step_details = SpiffStepDetailsModel.query.filter(
+ SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete) # type: ignore
+ ).all()
+
+ for deletion in step_details:
+ db.session.delete(deletion)
+ for deletion in results:
+ db.session.delete(deletion)
+ db.session.commit()
+
+ return rows_affected
diff --git a/src/spiffworkflow_backend/scripts/fact_service.py b/src/spiffworkflow_backend/scripts/fact_service.py
index ee86a84a7..c739d15aa 100644
--- a/src/spiffworkflow_backend/scripts/fact_service.py
+++ b/src/spiffworkflow_backend/scripts/fact_service.py
@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class FactService(Script):
"""FactService."""
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """We have deemed this function safe to run without elevated permissions."""
+ return False
+
def get_description(self) -> str:
"""Get_description."""
return """Just your basic class that can pull in data from a few api endpoints and
@@ -30,7 +35,10 @@ class FactService(Script):
if fact == "cat":
details = "The cat in the hat" # self.get_cat()
elif fact == "norris":
- details = "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants."
+ details = (
+ "Chuck Norris doesn’t read books. He stares them down until he gets the"
+ " information he wants."
+ )
elif fact == "buzzword":
details = "Move the Needle." # self.get_buzzword()
else:
diff --git a/src/spiffworkflow_backend/scripts/get_all_permissions.py b/src/spiffworkflow_backend/scripts/get_all_permissions.py
new file mode 100644
index 000000000..e2ab07637
--- /dev/null
+++ b/src/spiffworkflow_backend/scripts/get_all_permissions.py
@@ -0,0 +1,71 @@
+"""Get_all_permissions."""
+from collections import OrderedDict
+from typing import Any
+
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
+from spiffworkflow_backend.models.permission_target import PermissionTargetModel
+from spiffworkflow_backend.models.principal import PrincipalModel
+from spiffworkflow_backend.models.script_attributes_context import (
+ ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+
+
+class GetAllPermissions(Script):
+ """GetAllPermissions."""
+
+ def get_description(self) -> str:
+ """Get_description."""
+ return """Get all permissions currently in the system."""
+
+ def run(
+ self,
+ script_attributes_context: ScriptAttributesContext,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Any:
+ """Run."""
+ permission_assignments = (
+ PermissionAssignmentModel.query.join(
+ PrincipalModel,
+ PrincipalModel.id == PermissionAssignmentModel.principal_id,
+ )
+ .join(GroupModel, GroupModel.id == PrincipalModel.group_id)
+ .join(
+ PermissionTargetModel,
+ PermissionTargetModel.id
+ == PermissionAssignmentModel.permission_target_id,
+ )
+ .add_columns(
+ PermissionAssignmentModel.permission,
+ PermissionTargetModel.uri,
+ GroupModel.identifier.label("group_identifier"),
+ )
+ )
+
+ permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict()
+ for pa in permission_assignments:
+ permissions.setdefault((pa.group_identifier, pa.uri), []).append(
+ pa.permission
+ )
+
+ def replace_suffix(string: str, old: str, new: str) -> str:
+ """Replace_suffix."""
+ if string.endswith(old):
+ return string[: -len(old)] + new
+ return string
+
+ # sort list of strings based on a specific order
+ def sort_by_order(string_list: list, order: list) -> list:
+ """Sort_by_order."""
+ return sorted(string_list, key=lambda x: order.index(x))
+
+ return [
+ {
+ "group_identifier": k[0],
+ "uri": replace_suffix(k[1], "%", "*"),
+ "permissions": sort_by_order(v, ["create", "read", "update", "delete"]),
+ }
+ for k, v in permissions.items()
+ ]
diff --git a/src/spiffworkflow_backend/scripts/get_current_user.py b/src/spiffworkflow_backend/scripts/get_current_user.py
index a1a1b47e9..66d21a4ca 100644
--- a/src/spiffworkflow_backend/scripts/get_current_user.py
+++ b/src/spiffworkflow_backend/scripts/get_current_user.py
@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetCurrentUser(Script):
"""GetCurrentUser."""
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """We have deemed this function safe to run without elevated permissions."""
+ return False
+
def get_description(self) -> str:
"""Get_description."""
return """Return the current user."""
diff --git a/src/spiffworkflow_backend/scripts/get_env.py b/src/spiffworkflow_backend/scripts/get_env.py
index cd586ae00..7a6b0f44c 100644
--- a/src/spiffworkflow_backend/scripts/get_env.py
+++ b/src/spiffworkflow_backend/scripts/get_env.py
@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetEnv(Script):
"""GetEnv."""
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """We have deemed this function safe to run without elevated permissions."""
+ return False
+
def get_description(self) -> str:
"""Get_description."""
return """Returns the current environment - ie testing, staging, production."""
diff --git a/src/spiffworkflow_backend/scripts/get_frontend_url.py b/src/spiffworkflow_backend/scripts/get_frontend_url.py
index 9490df95a..b128214ab 100644
--- a/src/spiffworkflow_backend/scripts/get_frontend_url.py
+++ b/src/spiffworkflow_backend/scripts/get_frontend_url.py
@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetFrontendUrl(Script):
"""GetFrontendUrl."""
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """We have deemed this function safe to run without elevated permissions."""
+ return False
+
def get_description(self) -> str:
"""Get_description."""
return """Return the url to the frontend."""
diff --git a/src/spiffworkflow_backend/scripts/get_group_members.py b/src/spiffworkflow_backend/scripts/get_group_members.py
index 243a8c524..0f20fbb3c 100644
--- a/src/spiffworkflow_backend/scripts/get_group_members.py
+++ b/src/spiffworkflow_backend/scripts/get_group_members.py
@@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetGroupMembers(Script):
"""GetGroupMembers."""
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """We have deemed this function safe to run without elevated permissions."""
+ return False
+
def get_description(self) -> str:
"""Get_description."""
return """Return the list of usernames of the users in the given group."""
@@ -27,7 +32,8 @@ class GetGroupMembers(Script):
group = GroupModel.query.filter_by(identifier=group_identifier).first()
if group is None:
raise GroupNotFoundError(
- f"Script 'get_group_members' could not find group with identifier '{group_identifier}'."
+ "Script 'get_group_members' could not find group with identifier"
+ f" '{group_identifier}'."
)
usernames = [u.username for u in group.users]
diff --git a/src/spiffworkflow_backend/scripts/get_localtime.py b/src/spiffworkflow_backend/scripts/get_localtime.py
index 689b86d8c..7c688e56f 100644
--- a/src/spiffworkflow_backend/scripts/get_localtime.py
+++ b/src/spiffworkflow_backend/scripts/get_localtime.py
@@ -14,6 +14,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetLocaltime(Script):
"""GetLocaltime."""
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """We have deemed this function safe to run without elevated permissions."""
+ return False
+
def get_description(self) -> str:
"""Get_description."""
return """Converts a Datetime object into a Datetime object for a specific timezone.
diff --git a/src/spiffworkflow_backend/scripts/get_process_info.py b/src/spiffworkflow_backend/scripts/get_process_info.py
index 45c70d6ba..99eb4ce26 100644
--- a/src/spiffworkflow_backend/scripts/get_process_info.py
+++ b/src/spiffworkflow_backend/scripts/get_process_info.py
@@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script
class GetProcessInfo(Script):
"""GetProcessInfo."""
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """We have deemed this function safe to run without elevated permissions."""
+ return False
+
def get_description(self) -> str:
"""Get_description."""
return """Returns a dictionary of information about the currently running process."""
@@ -23,5 +28,7 @@ class GetProcessInfo(Script):
"""Run."""
return {
"process_instance_id": script_attributes_context.process_instance_id,
- "process_model_identifier": script_attributes_context.process_model_identifier,
+ "process_model_identifier": (
+ script_attributes_context.process_model_identifier
+ ),
}
diff --git a/src/spiffworkflow_backend/scripts/get_secret.py b/src/spiffworkflow_backend/scripts/get_secret.py
new file mode 100644
index 000000000..1715b6a14
--- /dev/null
+++ b/src/spiffworkflow_backend/scripts/get_secret.py
@@ -0,0 +1,25 @@
+"""Get_secret."""
+from typing import Any
+
+from spiffworkflow_backend.models.script_attributes_context import (
+ ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+from spiffworkflow_backend.services.secret_service import SecretService
+
+
+class GetSecret(Script):
+ """GetSecret."""
+
+ def get_description(self) -> str:
+ """Get_description."""
+ return """Returns the value for a previously configured secret."""
+
+ def run(
+ self,
+ script_attributes_context: ScriptAttributesContext,
+ *args: Any,
+ **kwargs: Any
+ ) -> Any:
+ """Run."""
+ return SecretService.get_secret(args[0]).value
diff --git a/src/spiffworkflow_backend/scripts/refresh_permissions.py b/src/spiffworkflow_backend/scripts/refresh_permissions.py
new file mode 100644
index 000000000..4981af93d
--- /dev/null
+++ b/src/spiffworkflow_backend/scripts/refresh_permissions.py
@@ -0,0 +1,39 @@
+"""Refresh_permissions."""
+from typing import Any
+
+from spiffworkflow_backend.models.script_attributes_context import (
+ ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
+
+
+class RefreshPermissions(Script):
+ """RefreshPermissions."""
+
+ def get_description(self) -> str:
+ """Get_description."""
+ return """Add permissions using a dict.
+ group_info: [
+ {
+ 'name': group_identifier,
+ 'users': array_of_users,
+ 'permissions': [
+ {
+ 'actions': array_of_actions - create, read, etc,
+ 'uri': target_uri
+ }
+ ]
+ }
+ ]
+ """
+
+ def run(
+ self,
+ script_attributes_context: ScriptAttributesContext,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Any:
+ """Run."""
+ group_info = args[0]
+ AuthorizationService.refresh_permissions(group_info)
diff --git a/src/spiffworkflow_backend/scripts/script.py b/src/spiffworkflow_backend/scripts/script.py
index b744694a2..7ca798466 100644
--- a/src/spiffworkflow_backend/scripts/script.py
+++ b/src/spiffworkflow_backend/scripts/script.py
@@ -10,9 +10,12 @@ from typing import Callable
from flask_bpmn.api.api_error import ApiError
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceNotFoundError
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
# Generally speaking, having some global in a flask app is TERRIBLE.
# This is here, because after loading the application this will never change under
@@ -20,6 +23,10 @@ from spiffworkflow_backend.models.script_attributes_context import (
SCRIPT_SUB_CLASSES = None
+class ScriptUnauthorizedForUserError(Exception):
+ """ScriptUnauthorizedForUserError."""
+
+
class Script:
"""Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks."""
@@ -43,6 +50,15 @@ class Script:
+ "does not properly implement the run function.",
)
+ @staticmethod
+ def requires_privileged_permissions() -> bool:
+ """It seems safer to default to True and make safe functions opt in for any user to run them.
+
+ To give access to script for a given user, add a 'create' permission with following target-uri:
+ '/can-run-privileged-script/{script_name}'
+ """
+ return True
+
@staticmethod
def generate_augmented_list(
script_attributes_context: ScriptAttributesContext,
@@ -71,18 +87,52 @@ class Script:
that we created.
"""
instance = subclass()
- return lambda *ar, **kw: subclass.run(
- instance,
- script_attributes_context,
- *ar,
- **kw,
- )
+
+ def check_script_permission() -> None:
+ """Check_script_permission."""
+ if subclass.requires_privileged_permissions():
+ script_function_name = get_script_function_name(subclass)
+ uri = f"/can-run-privileged-script/{script_function_name}"
+ process_instance = ProcessInstanceModel.query.filter_by(
+ id=script_attributes_context.process_instance_id
+ ).first()
+ if process_instance is None:
+ raise ProcessInstanceNotFoundError(
+ "Could not find a process instance with id"
+ f" '{script_attributes_context.process_instance_id}' when"
+ f" running script '{script_function_name}'"
+ )
+ user = process_instance.process_initiator
+ has_permission = AuthorizationService.user_has_permission(
+ user=user, permission="create", target_uri=uri
+ )
+ if not has_permission:
+ raise ScriptUnauthorizedForUserError(
+ f"User {user.username} does not have access to run"
+ f" privileged script '{script_function_name}'"
+ )
+
+ def run_script_if_allowed(*ar: Any, **kw: Any) -> Any:
+ """Run_script_if_allowed."""
+ check_script_permission()
+ return subclass.run(
+ instance,
+ script_attributes_context,
+ *ar,
+ **kw,
+ )
+
+ return run_script_if_allowed
+
+ def get_script_function_name(subclass: type[Script]) -> str:
+ """Get_script_function_name."""
+ return subclass.__module__.split(".")[-1]
execlist = {}
subclasses = Script.get_all_subclasses()
for x in range(len(subclasses)):
subclass = subclasses[x]
- execlist[subclass.__module__.split(".")[-1]] = make_closure(
+ execlist[get_script_function_name(subclass)] = make_closure(
subclass, script_attributes_context=script_attributes_context
)
return execlist
@@ -101,7 +151,7 @@ class Script:
"""_get_all_subclasses."""
# hackish mess to make sure we have all the modules loaded for the scripts
pkg_dir = os.path.dirname(__file__)
- for (_module_loader, name, _ispkg) in pkgutil.iter_modules([pkg_dir]):
+ for _module_loader, name, _ispkg in pkgutil.iter_modules([pkg_dir]):
importlib.import_module("." + name, __package__)
"""Returns a list of all classes that extend this class."""
diff --git a/src/spiffworkflow_backend/services/acceptance_test_fixtures.py b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py
index 81488910e..6bbcad331 100644
--- a/src/spiffworkflow_backend/services/acceptance_test_fixtures.py
+++ b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py
@@ -29,7 +29,6 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
# suspended - 6 hours ago
process_instances = []
for i in range(len(statuses)):
-
process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
test_process_model_id, user
)
diff --git a/src/spiffworkflow_backend/services/authentication_service.py b/src/spiffworkflow_backend/services/authentication_service.py
index 95c1eaa89..f697904ea 100644
--- a/src/spiffworkflow_backend/services/authentication_service.py
+++ b/src/spiffworkflow_backend/services/authentication_service.py
@@ -52,12 +52,15 @@ class AuthenticationService:
@classmethod
def open_id_endpoint_for_name(cls, name: str) -> str:
"""All openid systems provide a mapping of static names to the full path of that endpoint."""
+ openid_config_url = f"{cls.server_url()}/.well-known/openid-configuration"
if name not in AuthenticationService.ENDPOINT_CACHE:
- request_url = f"{cls.server_url()}/.well-known/openid-configuration"
- response = requests.get(request_url)
+ response = requests.get(openid_config_url)
AuthenticationService.ENDPOINT_CACHE = response.json()
if name not in AuthenticationService.ENDPOINT_CACHE:
- raise Exception(f"Unknown OpenID Endpoint: {name}")
+ raise Exception(
+ f"Unknown OpenID Endpoint: {name}. Tried to get from"
+ f" {openid_config_url}"
+ )
return AuthenticationService.ENDPOINT_CACHE.get(name, "")
@staticmethod
@@ -93,7 +96,7 @@ class AuthenticationService:
+ f"?state={state}&"
+ "response_type=code&"
+ f"client_id={self.client_id()}&"
- + "scope=openid&"
+ + "scope=openid profile email&"
+ f"redirect_uri={return_redirect_url}"
)
return login_redirect_url
diff --git a/src/spiffworkflow_backend/services/authorization_service.py b/src/spiffworkflow_backend/services/authorization_service.py
index 9456f8f14..9abe25970 100644
--- a/src/spiffworkflow_backend/services/authorization_service.py
+++ b/src/spiffworkflow_backend/services/authorization_service.py
@@ -1,10 +1,14 @@
"""Authorization_service."""
import inspect
import re
+from dataclasses import dataclass
from hashlib import sha256
from hmac import compare_digest
from hmac import HMAC
+from typing import Any
from typing import Optional
+from typing import Set
+from typing import TypedDict
from typing import Union
import jwt
@@ -19,8 +23,9 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from sqlalchemy import or_
from sqlalchemy import text
-from spiffworkflow_backend.models.active_task import ActiveTaskModel
+from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import MissingPrincipalError
@@ -37,14 +42,48 @@ class PermissionsFileNotSetError(Exception):
"""PermissionsFileNotSetError."""
-class ActiveTaskNotFoundError(Exception):
- """ActiveTaskNotFoundError."""
+class HumanTaskNotFoundError(Exception):
+ """HumanTaskNotFoundError."""
class UserDoesNotHaveAccessToTaskError(Exception):
"""UserDoesNotHaveAccessToTaskError."""
+class InvalidPermissionError(Exception):
+ """InvalidPermissionError."""
+
+
+@dataclass
+class PermissionToAssign:
+ """PermissionToAssign."""
+
+ permission: str
+ target_uri: str
+
+
+# the relevant permissions are the only API methods that are currently available for each path prefix.
+# if we add further API methods, we'll need to evaluate whether they should be added here.
+PATH_SEGMENTS_FOR_PERMISSION_ALL = [
+ {"path": "/logs", "relevant_permissions": ["read"]},
+ {
+ "path": "/process-instances",
+ "relevant_permissions": ["create", "read", "delete"],
+ },
+ {"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
+ {"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
+ {"path": "/task-data", "relevant_permissions": ["read", "update"]},
+ {"path": "/process-data", "relevant_permissions": ["read"]},
+]
+
+
+class DesiredPermissionDict(TypedDict):
+ """DesiredPermissionDict."""
+
+ group_identifiers: Set[str]
+ permission_assignments: list[PermissionAssignmentModel]
+
+
class AuthorizationService:
"""Determine whether a user has permission to perform their request."""
@@ -75,6 +114,7 @@ class AuthorizationService:
) -> bool:
"""Has_permission."""
principal_ids = [p.id for p in principals]
+ target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)
permission_assignments = (
PermissionAssignmentModel.query.filter(
@@ -84,10 +124,13 @@ class AuthorizationService:
.join(PermissionTargetModel)
.filter(
or_(
- text(f"'{target_uri}' LIKE permission_target.uri"),
+ text(f"'{target_uri_normalized}' LIKE permission_target.uri"),
# to check for exact matches as well
# see test_user_can_access_base_path_when_given_wildcard_permission unit test
- text(f"'{target_uri}' = replace(permission_target.uri, '/%', '')"),
+ text(
+ f"'{target_uri_normalized}' ="
+ " replace(replace(permission_target.uri, '/%', ''), ':%', '')"
+ ),
)
)
.all()
@@ -127,17 +170,15 @@ class AuthorizationService:
return cls.has_permission(principals, permission, target_uri)
@classmethod
- def delete_all_permissions_and_recreate(cls) -> None:
- """Delete_all_permissions_and_recreate."""
+ def delete_all_permissions(cls) -> None:
+        """Delete all permission assignments, permission targets, and groups."""
for model in [PermissionAssignmentModel, PermissionTargetModel]:
db.session.query(model).delete()
# cascading to principals doesn't seem to work when attempting to delete all so do it like this instead
for group in GroupModel.query.all():
db.session.delete(group)
-
db.session.commit()
- cls.import_permissions_from_yaml_file()
@classmethod
def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None:
@@ -155,12 +196,13 @@ class AuthorizationService:
@classmethod
def import_permissions_from_yaml_file(
cls, raise_if_missing_user: bool = False
- ) -> None:
+ ) -> DesiredPermissionDict:
"""Import_permissions_from_yaml_file."""
if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
raise (
PermissionsFileNotSetError(
- "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions"
+ "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in"
+ " order to import permissions"
)
)
@@ -169,13 +211,16 @@ class AuthorizationService:
permission_configs = yaml.safe_load(file)
default_group = None
+ unique_user_group_identifiers: Set[str] = set()
if "default_group" in permission_configs:
default_group_identifier = permission_configs["default_group"]
default_group = GroupService.find_or_create_group(default_group_identifier)
+ unique_user_group_identifiers.add(default_group_identifier)
if "groups" in permission_configs:
for group_identifier, group_config in permission_configs["groups"].items():
group = GroupService.find_or_create_group(group_identifier)
+ unique_user_group_identifiers.add(group_identifier)
for username in group_config["users"]:
user = UserModel.query.filter_by(username=username).first()
if user is None:
@@ -188,26 +233,25 @@ class AuthorizationService:
continue
cls.associate_user_with_group(user, group)
+ permission_assignments = []
if "permissions" in permission_configs:
for _permission_identifier, permission_config in permission_configs[
"permissions"
].items():
uri = permission_config["uri"]
- uri_with_percent = re.sub(r"\*", "%", uri)
- permission_target = PermissionTargetModel.query.filter_by(
- uri=uri_with_percent
- ).first()
- if permission_target is None:
- permission_target = PermissionTargetModel(uri=uri_with_percent)
- db.session.add(permission_target)
- db.session.commit()
+ permission_target = cls.find_or_create_permission_target(uri)
for allowed_permission in permission_config["allowed_permissions"]:
if "groups" in permission_config:
for group_identifier in permission_config["groups"]:
group = GroupService.find_or_create_group(group_identifier)
- cls.create_permission_for_principal(
- group.principal, permission_target, allowed_permission
+ unique_user_group_identifiers.add(group_identifier)
+ permission_assignments.append(
+ cls.create_permission_for_principal(
+ group.principal,
+ permission_target,
+ allowed_permission,
+ )
)
if "users" in permission_config:
for username in permission_config["users"]:
@@ -218,14 +262,35 @@ class AuthorizationService:
.filter(UserModel.username == username)
.first()
)
- cls.create_permission_for_principal(
- principal, permission_target, allowed_permission
+ permission_assignments.append(
+ cls.create_permission_for_principal(
+ principal, permission_target, allowed_permission
+ )
)
if default_group is not None:
for user in UserModel.query.all():
cls.associate_user_with_group(user, default_group)
+ return {
+ "group_identifiers": unique_user_group_identifiers,
+ "permission_assignments": permission_assignments,
+ }
+
+ @classmethod
+ def find_or_create_permission_target(cls, uri: str) -> PermissionTargetModel:
+ """Find_or_create_permission_target."""
+ uri_with_percent = re.sub(r"\*", "%", uri)
+ target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
+ permission_target: Optional[PermissionTargetModel] = (
+ PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
+ )
+ if permission_target is None:
+ permission_target = PermissionTargetModel(uri=target_uri_normalized)
+ db.session.add(permission_target)
+ db.session.commit()
+ return permission_target
+
@classmethod
def create_permission_for_principal(
cls,
@@ -234,13 +299,13 @@ class AuthorizationService:
permission: str,
) -> PermissionAssignmentModel:
"""Create_permission_for_principal."""
- permission_assignment: Optional[
- PermissionAssignmentModel
- ] = PermissionAssignmentModel.query.filter_by(
- principal_id=principal.id,
- permission_target_id=permission_target.id,
- permission=permission,
- ).first()
+ permission_assignment: Optional[PermissionAssignmentModel] = (
+ PermissionAssignmentModel.query.filter_by(
+ principal_id=principal.id,
+ permission_target_id=permission_target.id,
+ permission=permission,
+ ).first()
+ )
if permission_assignment is None:
permission_assignment = PermissionAssignmentModel(
principal_id=principal.id,
@@ -340,7 +405,10 @@ class AuthorizationService:
raise ApiError(
error_code="unauthorized",
- message=f"User {g.user.username} is not authorized to perform requested action: {permission_string} - {request.path}",
+ message=(
+ f"User {g.user.username} is not authorized to perform requested action:"
+ f" {permission_string} - {request.path}"
+ ),
status_code=403,
)
@@ -419,7 +487,10 @@ class AuthorizationService:
except jwt.InvalidTokenError as exception:
raise ApiError(
"token_invalid",
- "The Authentication token you provided is invalid. You need a new token. ",
+ (
+ "The Authentication token you provided is invalid. You need a new"
+ " token. "
+ ),
) from exception
@staticmethod
@@ -429,53 +500,69 @@ class AuthorizationService:
user: UserModel,
) -> bool:
"""Assert_user_can_complete_spiff_task."""
- active_task = ActiveTaskModel.query.filter_by(
+ human_task = HumanTaskModel.query.filter_by(
task_name=spiff_task.task_spec.name,
process_instance_id=process_instance_id,
).first()
- if active_task is None:
- raise ActiveTaskNotFoundError(
- f"Could find an active task with task name '{spiff_task.task_spec.name}'"
+ if human_task is None:
+ raise HumanTaskNotFoundError(
+                f"Could not find a human task with task name '{spiff_task.task_spec.name}'"
f" for process instance '{process_instance_id}'"
)
- if user not in active_task.potential_owners:
+ if user not in human_task.potential_owners:
raise UserDoesNotHaveAccessToTaskError(
- f"User {user.username} does not have access to update task'{spiff_task.task_spec.name}'"
- f" for process instance '{process_instance_id}'"
+ f"User {user.username} does not have access to update"
+                f" task '{spiff_task.task_spec.name}' for process instance"
+ f" '{process_instance_id}'"
)
return True
@classmethod
def create_user_from_sign_in(cls, user_info: dict) -> UserModel:
"""Create_user_from_sign_in."""
+        # OpenID userinfo claims that may be present: name, family_name,
+        # given_name, middle_name, nickname, preferred_username, profile, picture,
+        # website, gender, birthdate, zoneinfo, locale, updated_at, email.
is_new_user = False
user_model = (
- UserModel.query.filter(UserModel.service == "open_id")
+ UserModel.query.filter(UserModel.service == user_info["iss"])
.filter(UserModel.service_id == user_info["sub"])
.first()
)
+ email = display_name = username = ""
+ if "email" in user_info:
+ username = user_info["email"]
+ email = user_info["email"]
+ else: # we fall back to the sub, which may be very ugly.
+ username = user_info["sub"] + "@" + user_info["iss"]
+
+ if "preferred_username" in user_info:
+ display_name = user_info["preferred_username"]
+ elif "nickname" in user_info:
+ display_name = user_info["nickname"]
+ elif "name" in user_info:
+ display_name = user_info["name"]
if user_model is None:
current_app.logger.debug("create_user in login_return")
is_new_user = True
- name = username = email = ""
- if "name" in user_info:
- name = user_info["name"]
- if "username" in user_info:
- username = user_info["username"]
- elif "preferred_username" in user_info:
- username = user_info["preferred_username"]
- if "email" in user_info:
- email = user_info["email"]
user_model = UserService().create_user(
- service="open_id",
- service_id=user_info["sub"],
- name=name,
username=username,
+ service=user_info["iss"],
+ service_id=user_info["sub"],
email=email,
+ display_name=display_name,
)
+ else:
+ # Update with the latest information
+ user_model.username = username
+ user_model.email = email
+ user_model.display_name = display_name
+ user_model.service = user_info["iss"]
+ user_model.service_id = user_info["sub"]
+
# this may eventually get too slow.
# when it does, be careful about backgrounding, because
# the user will immediately need permissions to use the site.
@@ -485,11 +572,255 @@ class AuthorizationService:
cls.import_permissions_from_yaml_file()
if is_new_user:
- UserService.add_user_to_active_tasks_if_appropriate(user_model)
+ UserService.add_user_to_human_tasks_if_appropriate(user_model)
# this cannot be None so ignore mypy
return user_model # type: ignore
+ @classmethod
+ def get_permissions_to_assign(
+ cls,
+ permission_set: str,
+ process_related_path_segment: str,
+ target_uris: list[str],
+ ) -> list[PermissionToAssign]:
+ """Get_permissions_to_assign."""
+ permissions = permission_set.split(",")
+ if permission_set == "all":
+ permissions = ["create", "read", "update", "delete"]
+
+ permissions_to_assign: list[PermissionToAssign] = []
+
+ # we were thinking that if you can start an instance, you ought to be able to view your own instances.
+ if permission_set == "start":
+ target_uri = f"/process-instances/{process_related_path_segment}"
+ permissions_to_assign.append(
+ PermissionToAssign(permission="create", target_uri=target_uri)
+ )
+ target_uri = f"/process-instances/for-me/{process_related_path_segment}"
+ permissions_to_assign.append(
+ PermissionToAssign(permission="read", target_uri=target_uri)
+ )
+
+ else:
+ if permission_set == "all":
+ for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL:
+ target_uri = (
+ f"{path_segment_dict['path']}/{process_related_path_segment}"
+ )
+ relevant_permissions = path_segment_dict["relevant_permissions"]
+ for permission in relevant_permissions:
+ permissions_to_assign.append(
+ PermissionToAssign(
+ permission=permission, target_uri=target_uri
+ )
+ )
+
+ for target_uri in target_uris:
+ for permission in permissions:
+ permissions_to_assign.append(
+ PermissionToAssign(permission=permission, target_uri=target_uri)
+ )
+
+ return permissions_to_assign
+
+ @classmethod
+ def set_basic_permissions(cls) -> list[PermissionToAssign]:
+ """Set_basic_permissions."""
+ permissions_to_assign: list[PermissionToAssign] = []
+ permissions_to_assign.append(
+ PermissionToAssign(
+ permission="read", target_uri="/process-instances/for-me"
+ )
+ )
+ permissions_to_assign.append(
+ PermissionToAssign(permission="read", target_uri="/processes")
+ )
+ permissions_to_assign.append(
+ PermissionToAssign(permission="read", target_uri="/service-tasks")
+ )
+ permissions_to_assign.append(
+ PermissionToAssign(
+ permission="read", target_uri="/user-groups/for-current-user"
+ )
+ )
+ permissions_to_assign.append(
+ PermissionToAssign(
+ permission="read", target_uri="/process-instances/find-by-id/*"
+ )
+ )
+
+ for permission in ["create", "read", "update", "delete"]:
+ permissions_to_assign.append(
+ PermissionToAssign(
+ permission=permission, target_uri="/process-instances/reports/*"
+ )
+ )
+ permissions_to_assign.append(
+ PermissionToAssign(permission=permission, target_uri="/tasks/*")
+ )
+ return permissions_to_assign
+
+ @classmethod
+ def set_process_group_permissions(
+ cls, target: str, permission_set: str
+ ) -> list[PermissionToAssign]:
+ """Set_process_group_permissions."""
+ permissions_to_assign: list[PermissionToAssign] = []
+ process_group_identifier = (
+ target.removeprefix("PG:").replace("/", ":").removeprefix(":")
+ )
+ process_related_path_segment = f"{process_group_identifier}:*"
+ if process_group_identifier == "ALL":
+ process_related_path_segment = "*"
+ target_uris = [
+ f"/process-groups/{process_related_path_segment}",
+ f"/process-models/{process_related_path_segment}",
+ ]
+ permissions_to_assign = permissions_to_assign + cls.get_permissions_to_assign(
+ permission_set, process_related_path_segment, target_uris
+ )
+ return permissions_to_assign
+
+ @classmethod
+ def set_process_model_permissions(
+ cls, target: str, permission_set: str
+ ) -> list[PermissionToAssign]:
+ """Set_process_model_permissions."""
+ permissions_to_assign: list[PermissionToAssign] = []
+ process_model_identifier = (
+ target.removeprefix("PM:").replace("/", ":").removeprefix(":")
+ )
+ process_related_path_segment = f"{process_model_identifier}/*"
+
+ if process_model_identifier == "ALL":
+ process_related_path_segment = "*"
+
+ target_uris = [f"/process-models/{process_related_path_segment}"]
+ permissions_to_assign = permissions_to_assign + cls.get_permissions_to_assign(
+ permission_set, process_related_path_segment, target_uris
+ )
+ return permissions_to_assign
+
+ @classmethod
+ def explode_permissions(
+ cls, permission_set: str, target: str
+ ) -> list[PermissionToAssign]:
+        """Explodes the given permission set into a list of PermissionToAssign objects.
+
+ These can be used to then iterate through and inserted into the database.
+ Target Macros:
+ ALL
+ * gives access to ALL api endpoints - useful to give admin-like permissions
+ PG:[process_group_identifier]
+ * affects given process-group and all sub process-groups and process-models
+ PM:[process_model_identifier]
+ * affects given process-model
+ BASIC
+ * Basic access to complete tasks and use the site
+
+ Permission Macros:
+ all
+ * create, read, update, delete
+ start
+ * create process-instances (aka instantiate or start a process-model)
+ * only works with PG and PM target macros
+ """
+ permissions_to_assign: list[PermissionToAssign] = []
+ permissions = permission_set.split(",")
+ if permission_set == "all":
+ permissions = ["create", "read", "update", "delete"]
+
+ if target.startswith("PG:"):
+ permissions_to_assign += cls.set_process_group_permissions(
+ target, permission_set
+ )
+ elif target.startswith("PM:"):
+ permissions_to_assign += cls.set_process_model_permissions(
+ target, permission_set
+ )
+ elif permission_set == "start":
+ raise InvalidPermissionError(
+ "Permission 'start' is only available for macros PM and PG."
+ )
+
+ elif target.startswith("BASIC"):
+ permissions_to_assign += cls.set_basic_permissions()
+ elif target == "ALL":
+ for permission in permissions:
+ permissions_to_assign.append(
+ PermissionToAssign(permission=permission, target_uri="/*")
+ )
+ elif target.startswith("/"):
+ for permission in permissions:
+ permissions_to_assign.append(
+ PermissionToAssign(permission=permission, target_uri=target)
+ )
+ else:
+ raise InvalidPermissionError(
+ f"Target uri '{target}' with permission set '{permission_set}' is"
+ " invalid. The target uri must either be a macro of PG, PM, BASIC, or"
+ " ALL or an api uri."
+ )
+
+ return permissions_to_assign
+
+ @classmethod
+ def add_permission_from_uri_or_macro(
+ cls, group_identifier: str, permission: str, target: str
+ ) -> list[PermissionAssignmentModel]:
+ """Add_permission_from_uri_or_macro."""
+ group = GroupService.find_or_create_group(group_identifier)
+ permissions_to_assign = cls.explode_permissions(permission, target)
+ permission_assignments = []
+ for permission_to_assign in permissions_to_assign:
+ permission_target = cls.find_or_create_permission_target(
+ permission_to_assign.target_uri
+ )
+ permission_assignments.append(
+ cls.create_permission_for_principal(
+ group.principal, permission_target, permission_to_assign.permission
+ )
+ )
+ return permission_assignments
+
+ @classmethod
+ def refresh_permissions(cls, group_info: list[dict[str, Any]]) -> None:
+ """Adds new permission assignments and deletes old ones."""
+ initial_permission_assignments = PermissionAssignmentModel.query.all()
+ result = cls.import_permissions_from_yaml_file()
+ desired_permission_assignments = result["permission_assignments"]
+ desired_group_identifiers = result["group_identifiers"]
+
+ for group in group_info:
+ group_identifier = group["name"]
+ for username in group["users"]:
+ GroupService.add_user_to_group_or_add_to_waiting(
+ username, group_identifier
+ )
+ desired_group_identifiers.add(group_identifier)
+ for permission in group["permissions"]:
+ for crud_op in permission["actions"]:
+ desired_permission_assignments.extend(
+ cls.add_permission_from_uri_or_macro(
+ group_identifier=group_identifier,
+ target=permission["uri"],
+ permission=crud_op,
+ )
+ )
+ desired_group_identifiers.add(group_identifier)
+
+ for ipa in initial_permission_assignments:
+ if ipa not in desired_permission_assignments:
+ db.session.delete(ipa)
+
+ groups_to_delete = GroupModel.query.filter(
+ GroupModel.identifier.not_in(desired_group_identifiers)
+ ).all()
+ for gtd in groups_to_delete:
+ db.session.delete(gtd)
+ db.session.commit()
+
class KeycloakAuthorization:
"""Interface with Keycloak server."""
diff --git a/src/spiffworkflow_backend/services/file_system_service.py b/src/spiffworkflow_backend/services/file_system_service.py
index a2a9181d4..cb8b44c6d 100644
--- a/src/spiffworkflow_backend/services/file_system_service.py
+++ b/src/spiffworkflow_backend/services/file_system_service.py
@@ -40,10 +40,9 @@ class FileSystemService:
@staticmethod
def root_path() -> str:
"""Root_path."""
- # fixme: allow absolute files
dir_name = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
- app_root = current_app.root_path
- return os.path.abspath(os.path.join(app_root, "..", dir_name))
+ # ensure this is a string - thanks mypy...
+ return os.path.abspath(os.path.join(dir_name, ""))
@staticmethod
def id_string_to_relative_path(id_string: str) -> str:
diff --git a/src/spiffworkflow_backend/services/git_service.py b/src/spiffworkflow_backend/services/git_service.py
index 8ef952c3c..43c18edc6 100644
--- a/src/spiffworkflow_backend/services/git_service.py
+++ b/src/spiffworkflow_backend/services/git_service.py
@@ -100,6 +100,7 @@ class GitService:
branch_name_to_use,
git_username,
git_email,
+ current_app.config["GIT_USER_PASSWORD"],
]
return cls.run_shell_command_to_get_stdout(shell_command)
@@ -172,13 +173,15 @@ class GitService:
if "repository" not in webhook or "clone_url" not in webhook["repository"]:
raise InvalidGitWebhookBodyError(
- f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}"
+ "Cannot find required keys of 'repository:clone_url' from webhook"
+ f" body: {webhook}"
)
clone_url = webhook["repository"]["clone_url"]
if clone_url != current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"]:
raise GitCloneUrlMismatchError(
- f"Configured clone url does not match clone url from webhook: {clone_url}"
+ "Configured clone url does not match clone url from webhook:"
+ f" {clone_url}"
)
if "ref" not in webhook:
@@ -188,8 +191,8 @@ class GitService:
if current_app.config["GIT_BRANCH"] is None:
raise MissingGitConfigsError(
- "Missing config for GIT_BRANCH. "
- "This is required for updating the repository as a result of the webhook"
+ "Missing config for GIT_BRANCH. This is required for updating the"
+ " repository as a result of the webhook"
)
ref = webhook["ref"]
diff --git a/src/spiffworkflow_backend/services/group_service.py b/src/spiffworkflow_backend/services/group_service.py
index aa560009e..911d41ac4 100644
--- a/src/spiffworkflow_backend/services/group_service.py
+++ b/src/spiffworkflow_backend/services/group_service.py
@@ -4,6 +4,7 @@ from typing import Optional
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService
@@ -22,3 +23,15 @@ class GroupService:
db.session.commit()
UserService.create_principal(group.id, id_column_name="group_id")
return group
+
+ @classmethod
+ def add_user_to_group_or_add_to_waiting(
+ cls, username: str, group_identifier: str
+ ) -> None:
+ """Add_user_to_group_or_add_to_waiting."""
+ group = cls.find_or_create_group(group_identifier)
+ user = UserModel.query.filter_by(username=username).first()
+ if user:
+ UserService.add_user_to_group(user, group)
+ else:
+ UserService.add_waiting_group_assignment(username, group)
diff --git a/src/spiffworkflow_backend/services/logging_service.py b/src/spiffworkflow_backend/services/logging_service.py
index dd34cb3fd..599d5228d 100644
--- a/src/spiffworkflow_backend/services/logging_service.py
+++ b/src/spiffworkflow_backend/services/logging_service.py
@@ -122,7 +122,8 @@ def setup_logger(app: Flask) -> None:
if upper_log_level_string not in log_levels:
raise InvalidLogLevelError(
- f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}"
+ f"Log level given is invalid: '{upper_log_level_string}'. Valid options are"
+ f" {log_levels}"
)
log_level = getattr(logging, upper_log_level_string)
@@ -176,7 +177,8 @@ def setup_logger(app: Flask) -> None:
spiff_logger = logging.getLogger("spiff")
spiff_logger.setLevel(spiff_log_level)
spiff_formatter = logging.Formatter(
- "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s | %(process_instance_id)s"
+ "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s |"
+ " %(process)s | %(processName)s | %(process_instance_id)s"
)
# if you add a handler to spiff, it will be used/inherited by spiff.metrics
diff --git a/src/spiffworkflow_backend/services/message_service.py b/src/spiffworkflow_backend/services/message_service.py
index cfb42c836..b3d1e831f 100644
--- a/src/spiffworkflow_backend/services/message_service.py
+++ b/src/spiffworkflow_backend/services/message_service.py
@@ -145,8 +145,11 @@ class MessageService:
if process_instance_receive is None:
raise MessageServiceError(
(
- f"Process instance cannot be found for queued message: {message_instance_receive.id}."
- f"Tried with id {message_instance_receive.process_instance_id}",
+ (
+ "Process instance cannot be found for queued message:"
+ f" {message_instance_receive.id}.Tried with id"
+ f" {message_instance_receive.process_instance_id}"
+ ),
)
)
@@ -182,7 +185,6 @@ class MessageService:
)
for message_instance_receive in message_instances_receive:
-
# sqlalchemy supports select / where statements like active record apparantly
# https://docs.sqlalchemy.org/en/14/core/tutorial.html#conjunctions
message_correlation_select = (
diff --git a/src/spiffworkflow_backend/services/process_instance_processor.py b/src/spiffworkflow_backend/services/process_instance_processor.py
index 5edc526cf..5ef4baf01 100644
--- a/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -17,6 +17,7 @@ from typing import Optional
from typing import Tuple
from typing import TypedDict
from typing import Union
+from uuid import UUID
import dateparser
import pytz
@@ -43,6 +44,9 @@ from SpiffWorkflow.spiff.serializer.task_spec_converters import (
CallActivityTaskConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+ EventBasedGatewayConverter,
+)
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
IntermediateCatchEventConverter,
)
@@ -65,11 +69,11 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
-from spiffworkflow_backend.models.active_task import ActiveTaskModel
-from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.file import File
from spiffworkflow_backend.models.file import FileType
from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_correlation_message_instance import (
MessageCorrelationMessageInstanceModel,
@@ -151,6 +155,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
"time": time,
"decimal": decimal,
"_strptime": _strptime,
+ "enumerate": enumerate,
+ "list": list,
+ "map": map,
}
# This will overwrite the standard builtins
@@ -209,14 +216,14 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
except Exception as exception:
if task is None:
raise ProcessInstanceProcessorError(
- "Error evaluating expression: "
- "'%s', exception: %s" % (expression, str(exception)),
+ "Error evaluating expression: '%s', exception: %s"
+ % (expression, str(exception)),
) from exception
else:
raise WorkflowTaskExecException(
task,
- "Error evaluating expression "
- "'%s', %s" % (expression, str(exception)),
+ "Error evaluating expression '%s', %s"
+ % (expression, str(exception)),
) from exception
def execute(
@@ -263,6 +270,7 @@ class ProcessInstanceProcessor:
EndEventConverter,
IntermediateCatchEventConverter,
IntermediateThrowEventConverter,
+ EventBasedGatewayConverter,
ManualTaskConverter,
NoneTaskConverter,
ReceiveTaskConverter,
@@ -276,6 +284,7 @@ class ProcessInstanceProcessor:
]
)
_serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
+ _event_serializer = EventBasedGatewayConverter()
PROCESS_INSTANCE_ID_KEY = "process_instance_id"
VALIDATION_PROCESS_KEY = "validate_only"
@@ -292,9 +301,7 @@ class ProcessInstanceProcessor:
tld.spiff_step = process_instance_model.spiff_step
# we want this to be the fully qualified path to the process model including all group subcomponents
- current_app.config[
- "THREAD_LOCAL_DATA"
- ].process_model_identifier = (
+ current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
f"{process_instance_model.process_model_identifier}"
)
@@ -375,8 +382,10 @@ class ProcessInstanceProcessor:
except MissingSpecError as ke:
raise ApiError(
error_code="unexpected_process_instance_structure",
- message="Failed to deserialize process_instance"
- " '%s' due to a mis-placed or missing task '%s'"
+ message=(
+ "Failed to deserialize process_instance"
+ " '%s' due to a mis-placed or missing task '%s'"
+ )
% (self.process_model_identifier, str(ke)),
) from ke
@@ -392,7 +401,10 @@ class ProcessInstanceProcessor:
raise (
ApiError(
"process_model_not_found",
- f"The given process model was not found: {process_model_identifier}.",
+ (
+ "The given process model was not found:"
+ f" {process_model_identifier}."
+ ),
)
)
spec_files = SpecFileService.get_files(process_model_info)
@@ -522,8 +534,11 @@ class ProcessInstanceProcessor:
potential_owner_ids.append(lane_owner_user.id)
self.raise_if_no_potential_owners(
potential_owner_ids,
- f"No users found in task data lane owner list for lane: {task_lane}. "
- f"The user list used: {task.data['lane_owners'][task_lane]}",
+ (
+ "No users found in task data lane owner list for lane:"
+ f" {task_lane}. The user list used:"
+ f" {task.data['lane_owners'][task_lane]}"
+ ),
)
else:
group_model = GroupModel.query.filter_by(identifier=task_lane).first()
@@ -558,7 +573,7 @@ class ProcessInstanceProcessor:
"spiff_step": self.process_instance_model.spiff_step or 1,
"task_json": task_json,
"timestamp": round(time.time()),
- "completed_by_user_id": self.current_user().id,
+ # "completed_by_user_id": self.current_user().id,
}
def spiff_step_details(self) -> SpiffStepDetailsModel:
@@ -569,17 +584,10 @@ class ProcessInstanceProcessor:
spiff_step=details_mapping["spiff_step"],
task_json=details_mapping["task_json"],
timestamp=details_mapping["timestamp"],
- completed_by_user_id=details_mapping["completed_by_user_id"],
+ # completed_by_user_id=details_mapping["completed_by_user_id"],
)
return details_model
- def save_spiff_step_details(self, active_task: ActiveTaskModel) -> None:
- """SaveSpiffStepDetails."""
- details_model = self.spiff_step_details()
- details_model.lane_assignment_id = active_task.lane_assignment_id
- db.session.add(details_model)
- db.session.commit()
-
def extract_metadata(self, process_model_info: ProcessModelInfo) -> None:
"""Extract_metadata."""
metadata_extraction_paths = process_model_info.metadata_extraction_paths
@@ -615,6 +623,29 @@ class ProcessInstanceProcessor:
db.session.add(pim)
db.session.commit()
+ def get_subprocesses_by_child_task_ids(self) -> dict:
+ """Get all subprocess ids based on the child task ids.
+
+ This is useful when trying to link the child task of a call activity back to
+ the call activity that called it to get the appropriate data. For example, if you
+ have a call activity "Log" that you call twice within the same process, the Hammer log file
+ activity within the Log process will get called twice. They will potentially have different
+ task data. We want to be able to differentiate those two activities.
+
+ subprocess structure in the json:
+ "subprocesses": { [subprocess_task_id]: "tasks" : { [task_id]: [bpmn_task_details] }}
+
+ Also note that subprocess_task_id might in fact be a call activity, because spiff treats
+ call activities like subprocesses in terms of the serialization.
+ """
+ bpmn_json = json.loads(self.serialize())
+ subprocesses_by_child_task_ids = {}
+ if "subprocesses" in bpmn_json:
+ for subprocess_id, subprocess_details in bpmn_json["subprocesses"].items():
+ for task_id in subprocess_details["tasks"]:
+ subprocesses_by_child_task_ids[task_id] = subprocess_id
+ return subprocesses_by_child_task_ids
+
def save(self) -> None:
"""Saves the current state of this processor to the database."""
self.process_instance_model.bpmn_json = self.serialize()
@@ -637,7 +668,7 @@ class ProcessInstanceProcessor:
db.session.add(self.process_instance_model)
db.session.commit()
- active_tasks = ActiveTaskModel.query.filter_by(
+ human_tasks = HumanTaskModel.query.filter_by(
process_instance_id=self.process_instance_model.id
).all()
ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks()
@@ -668,14 +699,14 @@ class ProcessInstanceProcessor:
if "formUiSchemaFilename" in properties:
ui_form_file_name = properties["formUiSchemaFilename"]
- active_task = None
- for at in active_tasks:
+ human_task = None
+ for at in human_tasks:
if at.task_id == str(ready_or_waiting_task.id):
- active_task = at
- active_tasks.remove(at)
+ human_task = at
+ human_tasks.remove(at)
- if active_task is None:
- active_task = ActiveTaskModel(
+ if human_task is None:
+ human_task = HumanTaskModel(
process_instance_id=self.process_instance_model.id,
process_model_display_name=process_model_display_name,
form_file_name=form_file_name,
@@ -687,23 +718,118 @@ class ProcessInstanceProcessor:
task_status=ready_or_waiting_task.get_state_name(),
lane_assignment_id=potential_owner_hash["lane_assignment_id"],
)
- db.session.add(active_task)
+ db.session.add(human_task)
db.session.commit()
for potential_owner_id in potential_owner_hash[
"potential_owner_ids"
]:
- active_task_user = ActiveTaskUserModel(
- user_id=potential_owner_id, active_task_id=active_task.id
+ human_task_user = HumanTaskUserModel(
+ user_id=potential_owner_id, human_task_id=human_task.id
)
- db.session.add(active_task_user)
+ db.session.add(human_task_user)
db.session.commit()
- if len(active_tasks) > 0:
- for at in active_tasks:
- db.session.delete(at)
+ if len(human_tasks) > 0:
+ for at in human_tasks:
+ at.completed = True
+ db.session.add(at)
db.session.commit()
+ def serialize_task_spec(self, task_spec: SpiffTask) -> Any:
+ """Get a serialized version of a task spec."""
+ # The task spec is NOT actually a SpiffTask, it is the task spec attached to a SpiffTask
+ # Not sure why mypy accepts this but whatever.
+ return self._serializer.spec_converter.convert(task_spec)
+
+ def send_bpmn_event(self, event_data: dict[str, Any]) -> None:
+ """Send an event to the workflow."""
+ payload = event_data.pop("payload", None)
+ event_definition = self._event_serializer.restore(event_data)
+ if payload is not None:
+ event_definition.payload = payload
+ current_app.logger.info(
+ f"Event of type {event_definition.event_type} sent to process instance"
+ f" {self.process_instance_model.id}"
+ )
+ self.bpmn_process_instance.catch(event_definition)
+ self.do_engine_steps(save=True)
+
+ def add_step(self, step: Union[dict, None] = None) -> None:
+ """Add a spiff step."""
+ if step is None:
+ step = self.spiff_step_details_mapping()
+ db.session.add(SpiffStepDetailsModel(**step))
+ db.session.commit()
+
+ def manual_complete_task(self, task_id: str, execute: bool) -> None:
+ """Mark the task complete optionally executing it."""
+ spiff_task = self.bpmn_process_instance.get_task(UUID(task_id))
+ if execute:
+ current_app.logger.info(
+ f"Manually executing Task {spiff_task.task_spec.name} of process"
+ f" instance {self.process_instance_model.id}"
+ )
+ spiff_task.complete()
+ else:
+ spiff_logger = logging.getLogger("spiff")
+ spiff_logger.info(
+ f"Skipped task {spiff_task.task_spec.name}", extra=spiff_task.log_info()
+ )
+ spiff_task._set_state(TaskState.COMPLETED)
+ for child in spiff_task.children:
+ child.task_spec._update(child)
+ self.bpmn_process_instance.last_task = spiff_task
+ self.increment_spiff_step()
+ self.add_step()
+ self.save()
+ # Saving the workflow seems to reset the status
+ self.suspend()
+
+ def reset_process(self, spiff_step: int) -> None:
+ """Reset a process to an earlier state."""
+ spiff_logger = logging.getLogger("spiff")
+ spiff_logger.info(
+ f"Process reset from step {spiff_step}",
+ extra=self.bpmn_process_instance.log_info(),
+ )
+
+ step_detail = (
+ db.session.query(SpiffStepDetailsModel)
+ .filter(
+ SpiffStepDetailsModel.process_instance_id
+ == self.process_instance_model.id,
+ SpiffStepDetailsModel.spiff_step == spiff_step,
+ )
+ .first()
+ )
+ if step_detail is not None:
+ self.increment_spiff_step()
+ self.add_step(
+ {
+ "process_instance_id": self.process_instance_model.id,
+ "spiff_step": self.process_instance_model.spiff_step or 1,
+ "task_json": step_detail.task_json,
+ "timestamp": round(time.time()),
+ }
+ )
+
+ dct = self._serializer.workflow_to_dict(self.bpmn_process_instance)
+ dct["tasks"] = step_detail.task_json["tasks"]
+ dct["subprocesses"] = step_detail.task_json["subprocesses"]
+ self.bpmn_process_instance = self._serializer.workflow_from_dict(dct)
+
+        # Cascade does not seem to work on filters, only directly through the session
+ tasks = self.bpmn_process_instance.get_tasks(TaskState.NOT_FINISHED_MASK)
+ rows = HumanTaskModel.query.filter(
+ HumanTaskModel.task_id.in_(str(t.id) for t in tasks) # type: ignore
+ ).all()
+ for row in rows:
+ db.session.delete(row)
+
+ self.save()
+ self.suspend()
+
@staticmethod
def get_parser() -> MyCustomParser:
"""Get_parser."""
@@ -738,14 +864,13 @@ class ProcessInstanceProcessor:
"""Bpmn_file_full_path_from_bpmn_process_identifier."""
if bpmn_process_identifier is None:
raise ValueError(
- "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
+ "bpmn_file_full_path_from_bpmn_process_identifier:"
+ " bpmn_process_identifier is unexpectedly None"
)
- spec_reference = (
- SpecReferenceCache.query.filter_by(identifier=bpmn_process_identifier)
- .filter_by(type="process")
- .first()
- )
+ spec_reference = SpecReferenceCache.query.filter_by(
+ identifier=bpmn_process_identifier, type="process"
+ ).first()
bpmn_file_full_path = None
if spec_reference is None:
bpmn_file_full_path = (
@@ -762,7 +887,10 @@ class ProcessInstanceProcessor:
raise (
ApiError(
error_code="could_not_find_bpmn_process_identifier",
- message="Could not find the the given bpmn process identifier from any sources: %s"
+ message=(
+ "Could not find the the given bpmn process identifier from any"
+ " sources: %s"
+ )
% bpmn_process_identifier,
)
)
@@ -786,7 +914,6 @@ class ProcessInstanceProcessor:
new_bpmn_files = set()
for bpmn_process_identifier in processor_dependencies_new:
-
# ignore identifiers that spiff already knows about
if bpmn_process_identifier in bpmn_process_identifiers_in_parser:
continue
@@ -829,7 +956,10 @@ class ProcessInstanceProcessor:
raise (
ApiError(
error_code="no_primary_bpmn_error",
- message="There is no primary BPMN process id defined for process_model %s"
+ message=(
+ "There is no primary BPMN process id defined for"
+ " process_model %s"
+ )
% process_model_info.id,
)
)
@@ -890,7 +1020,10 @@ class ProcessInstanceProcessor:
if not bpmn_message.correlations:
raise ApiError(
"message_correlations_missing",
- f"Could not find any message correlations bpmn_message: {bpmn_message.name}",
+ (
+ "Could not find any message correlations bpmn_message:"
+ f" {bpmn_message.name}"
+ ),
)
message_correlations = []
@@ -910,12 +1043,16 @@ class ProcessInstanceProcessor:
if message_correlation_property is None:
raise ApiError(
"message_correlations_missing_from_process",
- "Could not find a known message correlation with identifier:"
- f"{message_correlation_property_identifier}",
+ (
+ "Could not find a known message correlation with"
+ f" identifier:{message_correlation_property_identifier}"
+ ),
)
message_correlations.append(
{
- "message_correlation_property": message_correlation_property,
+ "message_correlation_property": (
+ message_correlation_property
+ ),
"name": message_correlation_key,
"value": message_correlation_property_value,
}
@@ -972,7 +1109,10 @@ class ProcessInstanceProcessor:
if message_model is None:
raise ApiError(
"invalid_message_name",
- f"Invalid message name: {waiting_task.task_spec.event_definition.name}.",
+ (
+ "Invalid message name:"
+ f" {waiting_task.task_spec.event_definition.name}."
+ ),
)
# Ensure we are only creating one message instance for each waiting message
@@ -1179,11 +1319,20 @@ class ProcessInstanceProcessor:
)
return user_tasks # type: ignore
- def complete_task(self, task: SpiffTask, active_task: ActiveTaskModel) -> None:
+ def complete_task(
+ self, task: SpiffTask, human_task: HumanTaskModel, user: UserModel
+ ) -> None:
"""Complete_task."""
self.increment_spiff_step()
self.bpmn_process_instance.complete_task_from_id(task.id)
- self.save_spiff_step_details(active_task)
+ human_task.completed_by_user_id = user.id
+ human_task.completed = True
+ db.session.add(human_task)
+ details_model = self.spiff_step_details()
+ db.session.add(details_model)
+
+ # this is the thing that actually commits the db transaction (on behalf of the other updates above as well)
+ self.save()
def get_data(self) -> dict[str, Any]:
"""Get_data."""
diff --git a/src/spiffworkflow_backend/services/process_instance_report_service.py b/src/spiffworkflow_backend/services/process_instance_report_service.py
index 84d5d6752..cd20b9b57 100644
--- a/src/spiffworkflow_backend/services/process_instance_report_service.py
+++ b/src/spiffworkflow_backend/services/process_instance_report_service.py
@@ -1,14 +1,31 @@
"""Process_instance_report_service."""
+import re
from dataclasses import dataclass
+from typing import Any
from typing import Optional
import sqlalchemy
+from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
+from sqlalchemy import and_
+from sqlalchemy import func
+from sqlalchemy import or_
+from sqlalchemy.orm import aliased
+from sqlalchemy.orm import selectinload
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance_metadata import (
+ ProcessInstanceMetadataModel,
+)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
@dataclass
@@ -16,14 +33,17 @@ class ProcessInstanceReportFilter:
"""ProcessInstanceReportFilter."""
process_model_identifier: Optional[str] = None
+ user_group_identifier: Optional[str] = None
start_from: Optional[int] = None
start_to: Optional[int] = None
end_from: Optional[int] = None
end_to: Optional[int] = None
process_status: Optional[list[str]] = None
initiated_by_me: Optional[bool] = None
+ has_terminal_status: Optional[bool] = None
with_tasks_completed_by_me: Optional[bool] = None
- with_tasks_completed_by_my_group: Optional[bool] = None
+ with_tasks_assigned_to_my_group: Optional[bool] = None
+ with_relation_to_me: Optional[bool] = None
def to_dict(self) -> dict[str, str]:
"""To_dict."""
@@ -31,6 +51,8 @@ class ProcessInstanceReportFilter:
if self.process_model_identifier is not None:
d["process_model_identifier"] = self.process_model_identifier
+ if self.user_group_identifier is not None:
+ d["user_group_identifier"] = self.user_group_identifier
if self.start_from is not None:
d["start_from"] = str(self.start_from)
if self.start_to is not None:
@@ -43,14 +65,18 @@ class ProcessInstanceReportFilter:
d["process_status"] = ",".join(self.process_status)
if self.initiated_by_me is not None:
d["initiated_by_me"] = str(self.initiated_by_me).lower()
+ if self.has_terminal_status is not None:
+ d["has_terminal_status"] = str(self.has_terminal_status).lower()
if self.with_tasks_completed_by_me is not None:
d["with_tasks_completed_by_me"] = str(
self.with_tasks_completed_by_me
).lower()
- if self.with_tasks_completed_by_my_group is not None:
- d["with_tasks_completed_by_my_group"] = str(
- self.with_tasks_completed_by_my_group
+ if self.with_tasks_assigned_to_my_group is not None:
+ d["with_tasks_assigned_to_my_group"] = str(
+ self.with_tasks_assigned_to_my_group
).lower()
+ if self.with_relation_to_me is not None:
+ d["with_relation_to_me"] = str(self.with_relation_to_me).lower()
return d
@@ -58,6 +84,55 @@ class ProcessInstanceReportFilter:
class ProcessInstanceReportService:
"""ProcessInstanceReportService."""
+ @classmethod
+ def system_metadata_map(cls, metadata_key: str) -> dict[str, Any]:
+ """System_metadata_map."""
+ # TODO replace with system reports that are loaded on launch (or similar)
+ temp_system_metadata_map = {
+ "default": {
+ "columns": cls.builtin_column_options(),
+ "filter_by": [],
+ "order_by": ["-start_in_seconds", "-id"],
+ },
+ "system_report_completed_instances_initiated_by_me": {
+ "columns": [
+ {"Header": "id", "accessor": "id"},
+ {
+ "Header": "process_model_display_name",
+ "accessor": "process_model_display_name",
+ },
+ {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+ {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+ {"Header": "status", "accessor": "status"},
+ ],
+ "filter_by": [
+ {"field_name": "initiated_by_me", "field_value": True},
+ {"field_name": "has_terminal_status", "field_value": True},
+ ],
+ "order_by": ["-start_in_seconds", "-id"],
+ },
+ "system_report_completed_instances_with_tasks_completed_by_me": {
+ "columns": cls.builtin_column_options(),
+ "filter_by": [
+ {"field_name": "with_tasks_completed_by_me", "field_value": True},
+ {"field_name": "has_terminal_status", "field_value": True},
+ ],
+ "order_by": ["-start_in_seconds", "-id"],
+ },
+ "system_report_completed_instances_with_tasks_completed_by_my_groups": {
+ "columns": cls.builtin_column_options(),
+ "filter_by": [
+ {
+ "field_name": "with_tasks_assigned_to_my_group",
+ "field_value": True,
+ },
+ {"field_name": "has_terminal_status", "field_value": True},
+ ],
+ "order_by": ["-start_in_seconds", "-id"],
+ },
+ }
+ return temp_system_metadata_map[metadata_key]
+
@classmethod
def report_with_identifier(
cls,
@@ -82,50 +157,10 @@ class ProcessInstanceReportService:
if process_instance_report is not None:
return process_instance_report # type: ignore
- # TODO replace with system reports that are loaded on launch (or similar)
- temp_system_metadata_map = {
- "default": {
- "columns": cls.builtin_column_options(),
- "filter_by": [],
- "order_by": ["-start_in_seconds", "-id"],
- },
- "system_report_instances_initiated_by_me": {
- "columns": [
- {"Header": "id", "accessor": "id"},
- {
- "Header": "process_model_display_name",
- "accessor": "process_model_display_name",
- },
- {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
- {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
- {"Header": "status", "accessor": "status"},
- ],
- "filter_by": [{"field_name": "initiated_by_me", "field_value": True}],
- "order_by": ["-start_in_seconds", "-id"],
- },
- "system_report_instances_with_tasks_completed_by_me": {
- "columns": cls.builtin_column_options(),
- "filter_by": [
- {"field_name": "with_tasks_completed_by_me", "field_value": True}
- ],
- "order_by": ["-start_in_seconds", "-id"],
- },
- "system_report_instances_with_tasks_completed_by_my_groups": {
- "columns": cls.builtin_column_options(),
- "filter_by": [
- {
- "field_name": "with_tasks_completed_by_my_group",
- "field_value": True,
- }
- ],
- "order_by": ["-start_in_seconds", "-id"],
- },
- }
-
process_instance_report = ProcessInstanceReportModel(
identifier=report_identifier,
created_by_id=user.id,
- report_metadata=temp_system_metadata_map[report_identifier],
+ report_metadata=cls.system_metadata_map(report_identifier),
)
return process_instance_report # type: ignore
@@ -164,27 +199,31 @@ class ProcessInstanceReportService:
return filters[key].split(",") if key in filters else None
process_model_identifier = filters.get("process_model_identifier")
+ user_group_identifier = filters.get("user_group_identifier")
start_from = int_value("start_from")
start_to = int_value("start_to")
end_from = int_value("end_from")
end_to = int_value("end_to")
process_status = list_value("process_status")
initiated_by_me = bool_value("initiated_by_me")
+ has_terminal_status = bool_value("has_terminal_status")
with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me")
- with_tasks_completed_by_my_group = bool_value(
- "with_tasks_completed_by_my_group"
- )
+ with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group")
+ with_relation_to_me = bool_value("with_relation_to_me")
report_filter = ProcessInstanceReportFilter(
process_model_identifier,
+ user_group_identifier,
start_from,
start_to,
end_from,
end_to,
process_status,
initiated_by_me,
+ has_terminal_status,
with_tasks_completed_by_me,
- with_tasks_completed_by_my_group,
+ with_tasks_assigned_to_my_group,
+ with_relation_to_me,
)
return report_filter
@@ -194,20 +233,25 @@ class ProcessInstanceReportService:
cls,
process_instance_report: ProcessInstanceReportModel,
process_model_identifier: Optional[str] = None,
+ user_group_identifier: Optional[str] = None,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
initiated_by_me: Optional[bool] = None,
+ has_terminal_status: Optional[bool] = None,
with_tasks_completed_by_me: Optional[bool] = None,
- with_tasks_completed_by_my_group: Optional[bool] = None,
+ with_tasks_assigned_to_my_group: Optional[bool] = None,
+ with_relation_to_me: Optional[bool] = None,
) -> ProcessInstanceReportFilter:
"""Filter_from_metadata_with_overrides."""
report_filter = cls.filter_from_metadata(process_instance_report)
if process_model_identifier is not None:
report_filter.process_model_identifier = process_model_identifier
+ if user_group_identifier is not None:
+ report_filter.user_group_identifier = user_group_identifier
if start_from is not None:
report_filter.start_from = start_from
if start_to is not None:
@@ -220,12 +264,16 @@ class ProcessInstanceReportService:
report_filter.process_status = process_status.split(",")
if initiated_by_me is not None:
report_filter.initiated_by_me = initiated_by_me
+ if has_terminal_status is not None:
+ report_filter.has_terminal_status = has_terminal_status
if with_tasks_completed_by_me is not None:
report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me
- if with_tasks_completed_by_my_group is not None:
- report_filter.with_tasks_completed_by_my_group = (
- with_tasks_completed_by_my_group
+ if with_tasks_assigned_to_my_group is not None:
+ report_filter.with_tasks_assigned_to_my_group = (
+ with_tasks_assigned_to_my_group
)
+ if with_relation_to_me is not None:
+ report_filter.with_relation_to_me = with_relation_to_me
return report_filter
@@ -241,9 +289,9 @@ class ProcessInstanceReportService:
process_instance_dict = process_instance["ProcessInstanceModel"].serialized
for metadata_column in metadata_columns:
if metadata_column["accessor"] not in process_instance_dict:
- process_instance_dict[
- metadata_column["accessor"]
- ] = process_instance[metadata_column["accessor"]]
+ process_instance_dict[metadata_column["accessor"]] = (
+ process_instance[metadata_column["accessor"]]
+ )
results.append(process_instance_dict)
return results
@@ -268,3 +316,207 @@ class ProcessInstanceReportService:
{"Header": "Username", "accessor": "username", "filterable": False},
{"Header": "Status", "accessor": "status", "filterable": False},
]
+
+ @classmethod
+ def run_process_instance_report(
+ cls,
+ report_filter: ProcessInstanceReportFilter,
+ process_instance_report: ProcessInstanceReportModel,
+ user: UserModel,
+ page: int = 1,
+ per_page: int = 100,
+ ) -> dict:
+ """Run_process_instance_report."""
+ process_instance_query = ProcessInstanceModel.query
+ # Always join that hot user table for good performance at serialization time.
+ process_instance_query = process_instance_query.options(
+ selectinload(ProcessInstanceModel.process_initiator)
+ )
+
+ if report_filter.process_model_identifier is not None:
+ process_model = ProcessModelService.get_process_model(
+ f"{report_filter.process_model_identifier}",
+ )
+
+ process_instance_query = process_instance_query.filter_by(
+ process_model_identifier=process_model.id
+ )
+
+ # this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
+ if (
+ ProcessInstanceModel.start_in_seconds is None
+ or ProcessInstanceModel.end_in_seconds is None
+ ):
+ raise (
+ ApiError(
+ error_code="unexpected_condition",
+ message="Something went very wrong",
+ status_code=500,
+ )
+ )
+
+ if report_filter.start_from is not None:
+ process_instance_query = process_instance_query.filter(
+ ProcessInstanceModel.start_in_seconds >= report_filter.start_from
+ )
+ if report_filter.start_to is not None:
+ process_instance_query = process_instance_query.filter(
+ ProcessInstanceModel.start_in_seconds <= report_filter.start_to
+ )
+ if report_filter.end_from is not None:
+ process_instance_query = process_instance_query.filter(
+ ProcessInstanceModel.end_in_seconds >= report_filter.end_from
+ )
+ if report_filter.end_to is not None:
+ process_instance_query = process_instance_query.filter(
+ ProcessInstanceModel.end_in_seconds <= report_filter.end_to
+ )
+ if report_filter.process_status is not None:
+ process_instance_query = process_instance_query.filter(
+ ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore
+ )
+
+ if report_filter.initiated_by_me is True:
+ process_instance_query = process_instance_query.filter_by(
+ process_initiator=user
+ )
+
+ if report_filter.has_terminal_status is True:
+ process_instance_query = process_instance_query.filter(
+ ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore
+ )
+
+ if (
+ not report_filter.with_tasks_completed_by_me
+ and not report_filter.with_tasks_assigned_to_my_group
+ and report_filter.with_relation_to_me is True
+ ):
+ process_instance_query = process_instance_query.outerjoin(
+ HumanTaskModel
+ ).outerjoin(
+ HumanTaskUserModel,
+ and_(
+ HumanTaskModel.id == HumanTaskUserModel.human_task_id,
+ HumanTaskUserModel.user_id == user.id,
+ ),
+ )
+ process_instance_query = process_instance_query.filter(
+ or_(
+ HumanTaskUserModel.id.is_not(None),
+ ProcessInstanceModel.process_initiator_id == user.id,
+ )
+ )
+
+ if report_filter.with_tasks_completed_by_me is True:
+ process_instance_query = process_instance_query.filter(
+ ProcessInstanceModel.process_initiator_id != user.id
+ )
+ process_instance_query = process_instance_query.join(
+ HumanTaskModel,
+ and_(
+ HumanTaskModel.process_instance_id == ProcessInstanceModel.id,
+ HumanTaskModel.completed_by_user_id == user.id,
+ ),
+ )
+
+ if report_filter.with_tasks_assigned_to_my_group is True:
+ group_model_join_conditions = [
+ GroupModel.id == HumanTaskModel.lane_assignment_id
+ ]
+ if report_filter.user_group_identifier:
+ group_model_join_conditions.append(
+ GroupModel.identifier == report_filter.user_group_identifier
+ )
+ process_instance_query = process_instance_query.join(HumanTaskModel)
+ process_instance_query = process_instance_query.join(
+ GroupModel, and_(*group_model_join_conditions)
+ )
+ process_instance_query = process_instance_query.join(
+ UserGroupAssignmentModel,
+ UserGroupAssignmentModel.group_id == GroupModel.id,
+ )
+ process_instance_query = process_instance_query.filter(
+ UserGroupAssignmentModel.user_id == user.id
+ )
+
+ instance_metadata_aliases = {}
+ stock_columns = ProcessInstanceReportService.get_column_names_for_model(
+ ProcessInstanceModel
+ )
+ for column in process_instance_report.report_metadata["columns"]:
+ if column["accessor"] in stock_columns:
+ continue
+ instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
+ instance_metadata_aliases[column["accessor"]] = instance_metadata_alias
+
+ filter_for_column = None
+ if "filter_by" in process_instance_report.report_metadata:
+ filter_for_column = next(
+ (
+ f
+ for f in process_instance_report.report_metadata["filter_by"]
+ if f["field_name"] == column["accessor"]
+ ),
+ None,
+ )
+ isouter = True
+ conditions = [
+ ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
+ instance_metadata_alias.key == column["accessor"],
+ ]
+ if filter_for_column:
+ isouter = False
+ conditions.append(
+ instance_metadata_alias.value == filter_for_column["field_value"]
+ )
+ process_instance_query = process_instance_query.join(
+ instance_metadata_alias, and_(*conditions), isouter=isouter
+ ).add_columns(
+ func.max(instance_metadata_alias.value).label(column["accessor"])
+ )
+
+ order_by_query_array = []
+ order_by_array = process_instance_report.report_metadata["order_by"]
+ if len(order_by_array) < 1:
+ order_by_array = ProcessInstanceReportModel.default_order_by()
+ for order_by_option in order_by_array:
+ attribute = re.sub("^-", "", order_by_option)
+ if attribute in stock_columns:
+ if order_by_option.startswith("-"):
+ order_by_query_array.append(
+ getattr(ProcessInstanceModel, attribute).desc()
+ )
+ else:
+ order_by_query_array.append(
+ getattr(ProcessInstanceModel, attribute).asc()
+ )
+ elif attribute in instance_metadata_aliases:
+ if order_by_option.startswith("-"):
+ order_by_query_array.append(
+ func.max(instance_metadata_aliases[attribute].value).desc()
+ )
+ else:
+ order_by_query_array.append(
+ func.max(instance_metadata_aliases[attribute].value).asc()
+ )
+ # return process_instance_query
+ process_instances = (
+ process_instance_query.group_by(ProcessInstanceModel.id)
+ .add_columns(ProcessInstanceModel.id)
+ .order_by(*order_by_query_array)
+ .paginate(page=page, per_page=per_page, error_out=False)
+ )
+ results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(
+ process_instances.items, process_instance_report.report_metadata["columns"]
+ )
+ response_json = {
+ "report": process_instance_report,
+ "results": results,
+ "filters": report_filter.to_dict(),
+ "pagination": {
+ "count": len(results),
+ "total": process_instances.total,
+ "pages": process_instances.pages,
+ },
+ }
+ return response_json
diff --git a/src/spiffworkflow_backend/services/process_instance_service.py b/src/spiffworkflow_backend/services/process_instance_service.py
index 5b2781a20..9812609fe 100644
--- a/src/spiffworkflow_backend/services/process_instance_service.py
+++ b/src/spiffworkflow_backend/services/process_instance_service.py
@@ -2,13 +2,14 @@
import time
from typing import Any
from typing import List
+from typing import Optional
from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
-from spiffworkflow_backend.models.active_task import ActiveTaskModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceApi
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
@@ -17,6 +18,7 @@ from spiffworkflow_backend.models.task import MultiInstanceType
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
@@ -36,7 +38,10 @@ class ProcessInstanceService:
user: UserModel,
) -> ProcessInstanceModel:
"""Get_process_instance_from_spec."""
- current_git_revision = GitService.get_current_revision()
+ try:
+ current_git_revision = GitService.get_current_revision()
+ except GitCommandError:
+ current_git_revision = ""
process_instance_model = ProcessInstanceModel(
status=ProcessInstanceStatus.not_started.value,
process_initiator=user,
@@ -81,7 +86,8 @@ class ProcessInstanceService:
db.session.add(process_instance)
db.session.commit()
error_message = (
- f"Error running waiting task for process_instance {process_instance.id}"
+ "Error running waiting task for process_instance"
+ f" {process_instance.id}"
+ f"({process_instance.process_model_identifier}). {str(e)}"
)
current_app.logger.error(error_message)
@@ -121,7 +127,7 @@ class ProcessInstanceService:
if next_task_trying_again is not None:
process_instance_api.next_task = (
ProcessInstanceService.spiff_task_to_api_task(
- next_task_trying_again, add_docs_and_forms=True
+ processor, next_task_trying_again, add_docs_and_forms=True
)
)
@@ -174,7 +180,10 @@ class ProcessInstanceService:
else:
raise ApiError.from_task(
error_code="task_lane_user_error",
- message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it."
+ message=(
+ "Spiff Task %s lane user dict must have a key called"
+ " 'value' with the user's uid in it."
+ )
% spiff_task.task_spec.name,
task=spiff_task,
)
@@ -196,7 +205,7 @@ class ProcessInstanceService:
spiff_task: SpiffTask,
data: dict[str, Any],
user: UserModel,
- active_task: ActiveTaskModel,
+ human_task: HumanTaskModel,
) -> None:
"""All the things that need to happen when we complete a form.
@@ -210,7 +219,7 @@ class ProcessInstanceService:
dot_dct = ProcessInstanceService.create_dot_dict(data)
spiff_task.update_data(dot_dct)
# ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store.
- processor.complete_task(spiff_task, active_task)
+ processor.complete_task(spiff_task, human_task, user=user)
processor.do_engine_steps(save=True)
@staticmethod
@@ -277,7 +286,10 @@ class ProcessInstanceService:
@staticmethod
def spiff_task_to_api_task(
- spiff_task: SpiffTask, add_docs_and_forms: bool = False
+ processor: ProcessInstanceProcessor,
+ spiff_task: SpiffTask,
+ add_docs_and_forms: bool = False,
+ calling_subprocess_task_id: Optional[str] = None,
) -> Task:
"""Spiff_task_to_api_task."""
task_type = spiff_task.task_spec.spec_type
@@ -311,6 +323,8 @@ class ProcessInstanceService:
if spiff_task.parent:
parent_id = spiff_task.parent.id
+ serialized_task_spec = processor.serialize_task_spec(spiff_task.task_spec)
+
task = Task(
spiff_task.id,
spiff_task.task_spec.name,
@@ -324,7 +338,9 @@ class ProcessInstanceService:
process_identifier=spiff_task.task_spec._wf_spec.name,
properties=props,
parent=parent_id,
+ event_definition=serialized_task_spec.get("event_definition"),
call_activity_process_identifier=call_activity_process_identifier,
+ calling_subprocess_task_id=calling_subprocess_task_id,
)
return task
diff --git a/src/spiffworkflow_backend/services/process_model_service.py b/src/spiffworkflow_backend/services/process_model_service.py
index 67be986e1..8fa25bc08 100644
--- a/src/spiffworkflow_backend/services/process_model_service.py
+++ b/src/spiffworkflow_backend/services/process_model_service.py
@@ -13,6 +13,8 @@ from flask_bpmn.api.api_error import ApiError
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
)
+from spiffworkflow_backend.interfaces import ProcessGroupLite
+from spiffworkflow_backend.interfaces import ProcessGroupLitesWithCache
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -146,7 +148,10 @@ class ProcessModelService(FileSystemService):
if len(instances) > 0:
raise ApiError(
error_code="existing_instances",
- message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
+ message=(
+ f"We cannot delete the model `{process_model_id}`, there are"
+ " existing instances that depend on it."
+ ),
)
process_model = self.get_process_model(process_model_id)
path = self.workflow_path(process_model)
@@ -224,31 +229,46 @@ class ProcessModelService(FileSystemService):
new_process_model_list = []
for process_model in process_models:
uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}"
- result = AuthorizationService.user_has_permission(
+ has_permission = AuthorizationService.user_has_permission(
user=user, permission="create", target_uri=uri
)
- if result:
+ if has_permission:
new_process_model_list.append(process_model)
return new_process_model_list
return process_models
@classmethod
- def get_parent_group_array(cls, process_identifier: str) -> list[dict]:
+ def get_parent_group_array_and_cache_it(
+ cls, process_identifier: str, process_group_cache: dict[str, ProcessGroup]
+ ) -> ProcessGroupLitesWithCache:
"""Get_parent_group_array."""
full_group_id_path = None
- parent_group_array = []
+ parent_group_array: list[ProcessGroupLite] = []
for process_group_id_segment in process_identifier.split("/")[0:-1]:
if full_group_id_path is None:
full_group_id_path = process_group_id_segment
else:
full_group_id_path = os.path.join(full_group_id_path, process_group_id_segment) # type: ignore
- parent_group = ProcessModelService.get_process_group(full_group_id_path)
+ parent_group = process_group_cache.get(full_group_id_path, None)
+ if parent_group is None:
+ parent_group = ProcessModelService.get_process_group(full_group_id_path)
+
if parent_group:
+ if full_group_id_path not in process_group_cache:
+ process_group_cache[full_group_id_path] = parent_group
parent_group_array.append(
{"id": parent_group.id, "display_name": parent_group.display_name}
)
- return parent_group_array
+ return {"cache": process_group_cache, "process_groups": parent_group_array}
+
+ @classmethod
+ def get_parent_group_array(cls, process_identifier: str) -> list[ProcessGroupLite]:
+ """Get_parent_group_array."""
+ parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it(
+ process_identifier, {}
+ )
+ return parent_group_lites_with_cache["process_groups"]
@classmethod
def get_process_groups(
@@ -339,8 +359,11 @@ class ProcessModelService(FileSystemService):
if len(problem_models) > 0:
raise ApiError(
error_code="existing_instances",
- message=f"We cannot delete the group `{process_group_id}`, "
- f"there are models with existing instances inside the group. {problem_models}",
+ message=(
+ f"We cannot delete the group `{process_group_id}`, there are"
+ " models with existing instances inside the group."
+ f" {problem_models}"
+ ),
)
shutil.rmtree(path)
self.cleanup_process_group_display_order()
@@ -392,7 +415,10 @@ class ProcessModelService(FileSystemService):
if process_group is None:
raise ApiError(
error_code="process_group_could_not_be_loaded_from_disk",
- message=f"We could not load the process_group from disk from: {dir_path}",
+ message=(
+ "We could not load the process_group from disk from:"
+ f" {dir_path}"
+ ),
)
else:
process_group_id = dir_path.replace(FileSystemService.root_path(), "")
@@ -457,7 +483,10 @@ class ProcessModelService(FileSystemService):
if process_model_info is None:
raise ApiError(
error_code="process_model_could_not_be_loaded_from_disk",
- message=f"We could not load the process_model from disk with data: {data}",
+ message=(
+ "We could not load the process_model from disk with data:"
+ f" {data}"
+ ),
)
else:
if name is None:
diff --git a/src/spiffworkflow_backend/services/script_unit_test_runner.py b/src/spiffworkflow_backend/services/script_unit_test_runner.py
index 9112e20f0..ed331672c 100644
--- a/src/spiffworkflow_backend/services/script_unit_test_runner.py
+++ b/src/spiffworkflow_backend/services/script_unit_test_runner.py
@@ -112,7 +112,10 @@ class ScriptUnitTestRunner:
except json.decoder.JSONDecodeError as ex:
return ScriptUnitTestResult(
result=False,
- error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}",
+ error=(
+ "Failed to parse expectedOutputJson:"
+ f" {unit_test['expectedOutputJson']}: {str(ex)}"
+ ),
)
script = task.task_spec.script
diff --git a/src/spiffworkflow_backend/services/secret_service.py b/src/spiffworkflow_backend/services/secret_service.py
index e4dee4913..aa9e6d147 100644
--- a/src/spiffworkflow_backend/services/secret_service.py
+++ b/src/spiffworkflow_backend/services/secret_service.py
@@ -44,8 +44,10 @@ class SecretService:
except Exception as e:
raise ApiError(
error_code="create_secret_error",
- message=f"There was an error creating a secret with key: {key} and value ending with: {value[:-4]}. "
- f"Original error is {e}",
+ message=(
+ f"There was an error creating a secret with key: {key} and value"
+ f" ending with: {value[:-4]}. Original error is {e}"
+ ),
) from e
return secret_model
@@ -89,7 +91,9 @@ class SecretService:
else:
raise ApiError(
error_code="update_secret_error",
- message=f"Cannot update secret with key: {key}. Resource does not exist.",
+ message=(
+ f"Cannot update secret with key: {key}. Resource does not exist."
+ ),
status_code=404,
)
@@ -104,11 +108,16 @@ class SecretService:
except Exception as e:
raise ApiError(
error_code="delete_secret_error",
- message=f"Could not delete secret with key: {key}. Original error is: {e}",
+ message=(
+ f"Could not delete secret with key: {key}. Original error"
+ f" is: {e}"
+ ),
) from e
else:
raise ApiError(
error_code="delete_secret_error",
- message=f"Cannot delete secret with key: {key}. Resource does not exist.",
+ message=(
+ f"Cannot delete secret with key: {key}. Resource does not exist."
+ ),
status_code=404,
)
diff --git a/src/spiffworkflow_backend/services/spec_file_service.py b/src/spiffworkflow_backend/services/spec_file_service.py
index 72f59d1f7..4fdfbd6d1 100644
--- a/src/spiffworkflow_backend/services/spec_file_service.py
+++ b/src/spiffworkflow_backend/services/spec_file_service.py
@@ -192,7 +192,8 @@ class SpecFileService(FileSystemService):
full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
if not os.path.exists(full_file_path):
raise ProcessModelFileNotFoundError(
- f"No file found with name {file_name} in {process_model_info.display_name}"
+ f"No file found with name {file_name} in"
+ f" {process_model_info.display_name}"
)
with open(full_file_path, "rb") as f_handle:
spec_file_data = f_handle.read()
@@ -314,8 +315,9 @@ class SpecFileService(FileSystemService):
).first()
if message_model is None:
raise ValidationException(
- f"Could not find message model with identifier '{message_model_identifier}'"
- f"Required by a Start Event in : {ref.file_name}"
+ "Could not find message model with identifier"
+ f" '{message_model_identifier}'Required by a Start Event in :"
+ f" {ref.file_name}"
)
message_triggerable_process_model = (
MessageTriggerableProcessModel.query.filter_by(
@@ -335,7 +337,8 @@ class SpecFileService(FileSystemService):
!= ref.process_model_id
):
raise ValidationException(
- f"Message model is already used to start process model {ref.process_model_id}"
+ "Message model is already used to start process model"
+ f" {ref.process_model_id}"
)
@staticmethod
@@ -353,8 +356,9 @@ class SpecFileService(FileSystemService):
).first()
if message_model is None:
raise ValidationException(
- f"Could not find message model with identifier '{message_model_identifier}'"
- f"specified by correlation property: {cpre}"
+ "Could not find message model with identifier"
+ f" '{message_model_identifier}'specified by correlation"
+ f" property: {cpre}"
)
# fixme: I think we are currently ignoring the correction properties.
message_correlation_property = (
diff --git a/src/spiffworkflow_backend/services/user_service.py b/src/spiffworkflow_backend/services/user_service.py
index 0e8e65c2c..20412e549 100644
--- a/src/spiffworkflow_backend/services/user_service.py
+++ b/src/spiffworkflow_backend/services/user_service.py
@@ -7,12 +7,15 @@ from flask import g
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
-from spiffworkflow_backend.models.active_task import ActiveTaskModel
-from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
+from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
+from spiffworkflow_backend.models.user_group_assignment_waiting import (
+ UserGroupAssignmentWaitingModel,
+)
class UserService:
@@ -21,11 +24,11 @@ class UserService:
@classmethod
def create_user(
cls,
+ username: str,
service: str,
service_id: str,
- name: Optional[str] = "",
- username: Optional[str] = "",
email: Optional[str] = "",
+ display_name: Optional[str] = "",
) -> UserModel:
"""Create_user."""
user_model: Optional[UserModel] = (
@@ -41,8 +44,8 @@ class UserService:
username=username,
service=service,
service_id=service_id,
- name=name,
email=email,
+ display_name=display_name,
)
db.session.add(user_model)
@@ -55,6 +58,7 @@ class UserService:
message=f"Could not add user {username}",
) from e
cls.create_principal(user_model.id)
+ UserService().apply_waiting_group_assignments(user_model)
return user_model
else:
@@ -69,45 +73,12 @@ class UserService:
)
)
- @classmethod
- def find_or_create_user(
- cls,
- service: str,
- service_id: str,
- name: Optional[str] = None,
- username: Optional[str] = None,
- email: Optional[str] = None,
- ) -> UserModel:
- """Find_or_create_user."""
- user_model: UserModel
- try:
- user_model = cls.create_user(
- service=service,
- service_id=service_id,
- name=name,
- username=username,
- email=email,
- )
- except ApiError:
- user_model = (
- UserModel.query.filter(UserModel.service == service)
- .filter(UserModel.service_id == service_id)
- .first()
- )
- return user_model
-
# Returns true if the current user is logged in.
@staticmethod
def has_user() -> bool:
"""Has_user."""
return "token" in g and bool(g.token) and "user" in g and bool(g.user)
- # Returns true if the given user uid is different from the current user's uid.
- @staticmethod
- def is_different_user(uid: str) -> bool:
- """Is_different_user."""
- return UserService.has_user() and uid is not None and uid is not g.user.uid
-
@staticmethod
def current_user() -> Any:
"""Current_user."""
@@ -117,20 +88,6 @@ class UserService:
)
return g.user
- @staticmethod
- def in_list(uids: list[str]) -> bool:
- """Returns true if the current user's id is in the given list of ids.
-
- False if there is no user, or the user is not in the list.
- """
- if (
- UserService.has_user()
- ): # If someone is logged in, lock tasks that don't belong to them.
- user = UserService.current_user()
- if user.uid in uids:
- return True
- return False
-
@staticmethod
def get_principal_by_user_id(user_id: int) -> PrincipalModel:
"""Get_principal_by_user_id."""
@@ -173,8 +130,57 @@ class UserService:
@classmethod
def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None:
"""Add_user_to_group."""
- ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
- db.session.add(ugam)
+ exists = (
+ UserGroupAssignmentModel()
+ .query.filter_by(user_id=user.id)
+ .filter_by(group_id=group.id)
+ .count()
+ )
+ if not exists:
+ ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
+ db.session.add(ugam)
+ db.session.commit()
+
+ @classmethod
+ def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None:
+ """Add_waiting_group_assignment."""
+ wugam = (
+ UserGroupAssignmentWaitingModel()
+ .query.filter_by(username=username)
+ .filter_by(group_id=group.id)
+ .first()
+ )
+ if not wugam:
+ wugam = UserGroupAssignmentWaitingModel(
+ username=username, group_id=group.id
+ )
+ db.session.add(wugam)
+ db.session.commit()
+ if wugam.is_match_all():
+ for user in UserModel.query.all():
+ cls.add_user_to_group(user, group)
+
+ @classmethod
+ def apply_waiting_group_assignments(cls, user: UserModel) -> None:
+ """Apply_waiting_group_assignments."""
+ waiting = (
+ UserGroupAssignmentWaitingModel()
+ .query.filter(UserGroupAssignmentWaitingModel.username == user.username)
+ .all()
+ )
+ for assignment in waiting:
+ cls.add_user_to_group(user, assignment.group)
+ db.session.delete(assignment)
+ wildcard = (
+ UserGroupAssignmentWaitingModel()
+ .query.filter(
+ UserGroupAssignmentWaitingModel.username
+ == UserGroupAssignmentWaitingModel.MATCH_ALL_USERS
+ )
+ .all()
+ )
+ for assignment in wildcard:
+ cls.add_user_to_group(user, assignment.group)
db.session.commit()
@staticmethod
@@ -192,15 +198,15 @@ class UserService:
return None
@classmethod
- def add_user_to_active_tasks_if_appropriate(cls, user: UserModel) -> None:
- """Add_user_to_active_tasks_if_appropriate."""
+ def add_user_to_human_tasks_if_appropriate(cls, user: UserModel) -> None:
+ """Add_user_to_human_tasks_if_appropriate."""
group_ids = [g.id for g in user.groups]
- active_tasks = ActiveTaskModel.query.filter(
- ActiveTaskModel.lane_assignment_id.in_(group_ids) # type: ignore
+ human_tasks = HumanTaskModel.query.filter(
+ HumanTaskModel.lane_assignment_id.in_(group_ids) # type: ignore
).all()
- for active_task in active_tasks:
- active_task_user = ActiveTaskUserModel(
- user_id=user.id, active_task_id=active_task.id
+ for human_task in human_tasks:
+ human_task_user = HumanTaskUserModel(
+ user_id=user.id, human_task_id=human_task.id
)
- db.session.add(active_task_user)
+ db.session.add(human_task_user)
db.session.commit()
diff --git a/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn b/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn
new file mode 100644
index 000000000..2e33d429b
--- /dev/null
+++ b/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn
@@ -0,0 +1,45 @@
+
+
+
+
+ Flow_0gixxkm
+
+
+
+
+
+
+
+
+
+ Flow_0gixxkm
+ Flow_1oi9nsn
+
+
+ Flow_1oi9nsn
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json b/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json
new file mode 100644
index 000000000..ae61e4963
--- /dev/null
+++ b/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json
@@ -0,0 +1,6 @@
+{
+ "title": "{FORM_IDENTIFIER}",
+ "description": "",
+ "properties": {},
+ "required": []
+}
diff --git a/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json b/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json
new file mode 100644
index 000000000..654ce121f
--- /dev/null
+++ b/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json
@@ -0,0 +1,3 @@
+{
+ "ui:order": []
+}
diff --git a/tests/data/data_object_test/data_object.bpmn b/tests/data/data_object_test/data_object.bpmn
new file mode 100644
index 000000000..c112339e2
--- /dev/null
+++ b/tests/data/data_object_test/data_object.bpmn
@@ -0,0 +1,75 @@
+
+
+
+
+ Flow_0hnphp9
+
+
+
+ Flow_0hnphp9
+ Flow_0amajxh
+
+ DataObjectReference_10g8dit
+
+ the_data_object_var = 'hey'
+
+
+
+ Flow_1ifqo6o
+
+
+
+ Flow_0amajxh
+ Flow_1ifqo6o
+
+
+ DataObjectReference_10g8dit
+ Property_0a8w16m
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data/error/script_error_with_task_data.bpmn b/tests/data/error/script_error_with_task_data.bpmn
new file mode 100644
index 000000000..cd5f58aa4
--- /dev/null
+++ b/tests/data/error/script_error_with_task_data.bpmn
@@ -0,0 +1,86 @@
+
+
+
+
+ Flow_10jwwqy
+
+
+
+ Flow_1axnzv6
+
+
+
+
+
+ {
+ "current_user": {
+ "id": "2",
+ "username": "ciadmin1"
+ },
+ "num": 0
+}
+ {
+ "Mike": "Awesome",
+ "i": 2,
+ "current_user": {
+ "id": "2",
+ "username": "ciadmin1"
+ },
+ "num": 0,
+ "my_var": "whatwhat",
+ "person": "Kevin"
+}
+
+
+ {}
+ {}
+
+
+ {"current_user": {"id": "1", "username": "kb"}}
+ {"Mike": "Awesome", "current_user": {"id": "1", "username": "kb"}, "heyhey": "https://demo.spiffworkflow.org", "i": 2, "members": [], "my_var": "whatwhat", "person": "Kevin"}
+
+
+
+ Flow_10jwwqy
+ Flow_1utkzvj
+ my_var = 'THE VAR'
+
+
+
+
+ Flow_1utkzvj
+ Flow_1axnzv6
+ hey
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data/process_navigation/process_navigation.bpmn b/tests/data/process_navigation/process_navigation.bpmn
new file mode 100644
index 000000000..9f2f26bf4
--- /dev/null
+++ b/tests/data/process_navigation/process_navigation.bpmn
@@ -0,0 +1,137 @@
+
+
+
+
+
+
+
+ Flow_1l15rbh
+
+
+
+ Flow_1l15rbh
+ Flow_0d35i06
+ Flow_0tzaigt
+ Flow_1vld4r2
+
+
+
+ Flow_0d35i06
+ Flow_1w3n49n
+
+
+
+ Flow_0tzaigt
+ Flow_1q47ol8
+
+
+
+
+
+
+ Flow_1q47ol8
+
+
+
+
+ Flow_1w3n49n
+
+
+
+ Flow_1vld4r2
+ Flow_13ai5vv
+
+ timedelta(hours=1)
+
+
+
+
+ Click the button.
+
+ Flow_13ai5vv
+ Flow_1vwnf3n
+
+
+ Flow_1vwnf3n
+
+
+
+
+ result
+
+
+
+
+ result
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data/script_refresh_permissions/refresh_permisions.bpmn b/tests/data/script_refresh_permissions/refresh_permisions.bpmn
new file mode 100644
index 000000000..630cd1221
--- /dev/null
+++ b/tests/data/script_refresh_permissions/refresh_permisions.bpmn
@@ -0,0 +1,39 @@
+
+
+
+
+ Flow_01cweoc
+
+
+
+ Flow_1xle2yo
+
+
+
+ Flow_01cweoc
+ Flow_1xle2yo
+ refresh_permissions([])
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/spiffworkflow_backend/helpers/base_test.py b/tests/spiffworkflow_backend/helpers/base_test.py
index 48982fc60..df62f5be1 100644
--- a/tests/spiffworkflow_backend/helpers/base_test.py
+++ b/tests/spiffworkflow_backend/helpers/base_test.py
@@ -41,7 +41,7 @@ class BaseTest:
if isinstance(user, UserModel):
return user
- user = UserService.create_user("internal", username, username=username)
+ user = UserService.create_user(username, "internal", username)
if isinstance(user, UserModel):
return user
@@ -133,7 +133,6 @@ class BaseTest:
) -> TestResponse:
"""Create_process_model."""
if process_model_id is not None:
-
# make sure we have a group
process_group_id, _ = os.path.split(process_model_id)
modified_process_group_id = process_group_id.replace("/", ":")
@@ -141,7 +140,6 @@ class BaseTest:
os.path.join(FileSystemService.root_path(), process_group_id)
)
if ProcessModelService.is_group(process_group_path):
-
if exception_notification_addresses is None:
exception_notification_addresses = []
@@ -171,7 +169,8 @@ class BaseTest:
raise Exception("You must create the group first")
else:
raise Exception(
- "You must include the process_model_id, which must be a path to the model"
+ "You must include the process_model_id, which must be a path to the"
+ " model"
)
def get_test_data_file_contents(
@@ -243,7 +242,7 @@ class BaseTest:
return file
@staticmethod
- def create_process_instance_from_process_model_id(
+ def create_process_instance_from_process_model_id_with_api(
client: FlaskClient,
test_process_model_id: str,
headers: Dict[str, str],
@@ -324,13 +323,9 @@ class BaseTest:
permission_names: Optional[list[str]] = None,
) -> UserModel:
"""Add_permissions_to_user."""
- permission_target = PermissionTargetModel.query.filter_by(
- uri=target_uri
- ).first()
- if permission_target is None:
- permission_target = PermissionTargetModel(uri=target_uri)
- db.session.add(permission_target)
- db.session.commit()
+ permission_target = AuthorizationService.find_or_create_permission_target(
+ target_uri
+ )
if permission_names is None:
permission_names = [member.name for member in Permission]
@@ -359,11 +354,8 @@ class BaseTest:
assert has_permission is expected_result
def modify_process_identifier_for_path_param(self, identifier: str) -> str:
- """Identifier."""
- if "\\" in identifier:
- raise Exception(f"Found backslash in identifier: {identifier}")
-
- return identifier.replace("/", ":")
+ """Modify_process_identifier_for_path_param."""
+ return ProcessModelInfo.modify_process_identifier_for_path_param(identifier)
def un_modify_modified_process_identifier_for_path_param(
self, modified_identifier: str
diff --git a/tests/spiffworkflow_backend/integration/test_logging_service.py b/tests/spiffworkflow_backend/integration/test_logging_service.py
index f9dd44522..d27bbdc7c 100644
--- a/tests/spiffworkflow_backend/integration/test_logging_service.py
+++ b/tests/spiffworkflow_backend/integration/test_logging_service.py
@@ -45,7 +45,7 @@ class TestLoggingService(BaseTest):
user=with_super_admin_user,
)
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
diff --git a/tests/spiffworkflow_backend/integration/test_nested_groups.py b/tests/spiffworkflow_backend/integration/test_nested_groups.py
index 3983f9be8..90b5af88d 100644
--- a/tests/spiffworkflow_backend/integration/test_nested_groups.py
+++ b/tests/spiffworkflow_backend/integration/test_nested_groups.py
@@ -38,7 +38,7 @@ class TestNestedGroups(BaseTest):
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
@@ -99,7 +99,7 @@ class TestNestedGroups(BaseTest):
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
diff --git a/tests/spiffworkflow_backend/integration/test_openid_blueprint.py b/tests/spiffworkflow_backend/integration/test_openid_blueprint.py
index 20a0bb67b..ce1655cb9 100644
--- a/tests/spiffworkflow_backend/integration/test_openid_blueprint.py
+++ b/tests/spiffworkflow_backend/integration/test_openid_blueprint.py
@@ -1,4 +1,7 @@
"""Test_authentication."""
+import base64
+
+import jwt
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@@ -44,13 +47,16 @@ class TestFlaskOpenId(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
+ """Test_get_token."""
+ code = "testadmin1:1234123412341234"
+
"""It should be possible to get a token."""
- code = (
- "c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx"
- )
+ backend_basic_auth_string = code
+ backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
+ backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
headers = {
"Content-Type": "application/x-www-form-urlencoded",
- "Authorization": f"Basic {code}",
+ "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}",
}
data = {
"grant_type": "authorization_code",
@@ -59,3 +65,13 @@ class TestFlaskOpenId(BaseTest):
}
response = client.post("/openid/token", data=data, headers=headers)
assert response
+ assert response.is_json
+ assert "access_token" in response.json
+ assert "id_token" in response.json
+ assert "refresh_token" in response.json
+
+ decoded_token = jwt.decode(
+ response.json["id_token"], options={"verify_signature": False}
+ )
+ assert "iss" in decoded_token
+ assert "email" in decoded_token
diff --git a/tests/spiffworkflow_backend/integration/test_process_api.py b/tests/spiffworkflow_backend/integration/test_process_api.py
index 3bc21456e..ef34fe060 100644
--- a/tests/spiffworkflow_backend/integration/test_process_api.py
+++ b/tests/spiffworkflow_backend/integration/test_process_api.py
@@ -4,6 +4,7 @@ import json
import os
import time
from typing import Any
+from typing import Dict
import pytest
from flask.app import Flask
@@ -15,8 +16,8 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
)
-from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
@@ -162,6 +163,83 @@ class TestProcessApi(BaseTest):
assert process_model.primary_file_name == bpmn_file_name
assert process_model.primary_process_id == "sample"
+ def test_process_model_create_with_natural_language(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_process_model_create_with_natural_language."""
+ process_group_id = "test_process_group"
+ process_group_description = "Test Process Group"
+ process_model_id = "sample"
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_description
+ )
+
+ text = "Create a Bug Tracker process model "
+ text += (
+ "with a Bug Details form that collects summary, description, and priority"
+ )
+ body = {"natural_language_text": text}
+ self.create_process_model_with_api(
+ client,
+ process_model_id=process_model_identifier,
+ user=with_super_admin_user,
+ )
+ response = client.post(
+ f"/v1.0/process-models-natural-language/{process_group_id}",
+ content_type="application/json",
+ data=json.dumps(body),
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert response.status_code == 201
+ assert response.json is not None
+ assert response.json["id"] == f"{process_group_id}/bug-tracker"
+ assert response.json["display_name"] == "Bug Tracker"
+ assert response.json["metadata_extraction_paths"] == [
+ {"key": "summary", "path": "summary"},
+ {"key": "description", "path": "description"},
+ {"key": "priority", "path": "priority"},
+ ]
+
+ process_model = ProcessModelService.get_process_model(response.json["id"])
+ process_model_path = os.path.join(
+ FileSystemService.root_path(),
+ FileSystemService.id_string_to_relative_path(process_model.id),
+ )
+
+ process_model_diagram = os.path.join(process_model_path, "bug-tracker.bpmn")
+ assert os.path.exists(process_model_diagram)
+ form_schema_json = os.path.join(process_model_path, "bug-details-schema.json")
+ assert os.path.exists(form_schema_json)
+ form_uischema_json = os.path.join(
+ process_model_path, "bug-details-uischema.json"
+ )
+ assert os.path.exists(form_uischema_json)
+
+ process_instance_report = ProcessInstanceReportModel.query.filter_by(
+ identifier="bug-tracker"
+ ).first()
+ assert process_instance_report is not None
+ report_column_accessors = [
+ i["accessor"] for i in process_instance_report.report_metadata["columns"]
+ ]
+ expected_column_accessors = [
+ "id",
+ "process_model_display_name",
+ "start_in_seconds",
+ "end_in_seconds",
+ "username",
+ "status",
+ "summary",
+ "description",
+ "priority",
+ ]
+ assert report_column_accessors == expected_column_accessors
+
def test_primary_process_id_updates_via_xml(
self,
app: Flask,
@@ -249,10 +327,6 @@ class TestProcessApi(BaseTest):
assert response.json is not None
assert response.json["ok"] is True
- # assert we no longer have a model
- with pytest.raises(ProcessEntityNotFoundError):
- ProcessModelService.get_process_model(process_model_identifier)
-
def test_process_model_delete_with_instances(
self,
app: Flask,
@@ -284,7 +358,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
# create an instance from a model
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
@@ -304,7 +378,8 @@ class TestProcessApi(BaseTest):
assert data["error_code"] == "existing_instances"
assert (
data["message"]
- == f"We cannot delete the model `{process_model_identifier}`, there are existing instances that depend on it."
+ == f"We cannot delete the model `{process_model_identifier}`, there are"
+ " existing instances that depend on it."
)
def test_process_model_update(
@@ -1072,7 +1147,7 @@ class TestProcessApi(BaseTest):
"""Test_process_instance_create."""
test_process_model_id = "runs_without_input/sample"
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, test_process_model_id, headers
)
assert response.json is not None
@@ -1102,7 +1177,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -1144,7 +1219,7 @@ class TestProcessApi(BaseTest):
self.modify_process_identifier_for_path_param(process_model_identifier)
)
headers = self.logged_in_headers(with_super_admin_user)
- create_response = self.create_process_instance_from_process_model_id(
+ create_response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert create_response.json is not None
@@ -1191,7 +1266,7 @@ class TestProcessApi(BaseTest):
self.modify_process_identifier_for_path_param(process_model_identifier)
)
headers = self.logged_in_headers(with_super_admin_user)
- create_response = self.create_process_instance_from_process_model_id(
+ create_response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert create_response.json is not None
@@ -1299,7 +1374,7 @@ class TestProcessApi(BaseTest):
"andThis": "another_item_non_key",
}
}
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
@@ -1359,7 +1434,7 @@ class TestProcessApi(BaseTest):
bpmn_file_location=bpmn_file_location,
)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
@@ -1375,7 +1450,7 @@ class TestProcessApi(BaseTest):
assert response.json is not None
response = client.post(
- f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/terminate",
+ f"/v1.0/process-instance-terminate/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@@ -1396,20 +1471,18 @@ class TestProcessApi(BaseTest):
) -> None:
"""Test_process_instance_delete."""
process_group_id = "my_process_group"
- process_model_id = "user_task"
- bpmn_file_name = "user_task.bpmn"
- bpmn_file_location = "user_task"
+ process_model_id = "sample"
+ bpmn_file_location = "sample"
process_model_identifier = self.create_group_and_model_with_bpmn(
client,
with_super_admin_user,
process_group_id=process_group_id,
process_model_id=process_model_id,
- bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -1420,11 +1493,13 @@ class TestProcessApi(BaseTest):
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
+ assert response.status_code == 200
delete_response = client.delete(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
+ assert delete_response.json["ok"] is True
assert delete_response.status_code == 200
def test_task_show(
@@ -1448,7 +1523,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -1462,15 +1537,15 @@ class TestProcessApi(BaseTest):
assert response.json is not None
assert response.json["next_task"] is not None
- active_tasks = (
- db.session.query(ActiveTaskModel)
- .filter(ActiveTaskModel.process_instance_id == process_instance_id)
+ human_tasks = (
+ db.session.query(HumanTaskModel)
+ .filter(HumanTaskModel.process_instance_id == process_instance_id)
.all()
)
- assert len(active_tasks) == 1
- active_task = active_tasks[0]
+ assert len(human_tasks) == 1
+ human_task = human_tasks[0]
response = client.get(
- f"/v1.0/tasks/{process_instance_id}/{active_task.task_id}",
+ f"/v1.0/tasks/{process_instance_id}/{human_task.task_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
@@ -1499,7 +1574,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
- self.create_process_instance_from_process_model_id(
+ self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
@@ -1546,19 +1621,19 @@ class TestProcessApi(BaseTest):
bpmn_file_location=bpmn_file_location,
)
headers = self.logged_in_headers(with_super_admin_user)
- self.create_process_instance_from_process_model_id(
+ self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
- self.create_process_instance_from_process_model_id(
+ self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
- self.create_process_instance_from_process_model_id(
+ self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
- self.create_process_instance_from_process_model_id(
+ self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
- self.create_process_instance_from_process_model_id(
+ self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
@@ -1872,7 +1947,7 @@ class TestProcessApi(BaseTest):
) -> Any:
"""Setup_testing_instance."""
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_id, headers
)
process_instance = response.json
@@ -2019,7 +2094,6 @@ class TestProcessApi(BaseTest):
mail = app.config["MAIL_APP"]
with mail.record_messages() as outbox:
-
response = client.post(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
@@ -2041,6 +2115,36 @@ class TestProcessApi(BaseTest):
assert process is not None
assert process.status == "error"
+ def test_task_data_is_set_even_if_process_instance_errors(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_task_data_is_set_even_if_process_instance_errors."""
+ process_model = load_test_spec(
+ process_model_id="group/error_with_task_data",
+ bpmn_file_name="script_error_with_task_data.bpmn",
+ process_model_source_directory="error",
+ )
+ process_instance = self.create_process_instance_from_process_model(
+ process_model=process_model, user=with_super_admin_user
+ )
+
+ response = client.post(
+ f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert response.status_code == 400
+ assert process_instance.status == "error"
+ processor = ProcessInstanceProcessor(process_instance)
+ spiff_task = processor.get_task_by_bpmn_identifier(
+ "script_task_one", processor.bpmn_process_instance
+ )
+ assert spiff_task is not None
+ assert spiff_task.data != {}
+
def test_process_model_file_create(
self,
app: Flask,
@@ -2195,7 +2299,7 @@ class TestProcessApi(BaseTest):
# process_group_id="finance",
# )
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client,
# process_model.process_group_id,
process_model_identifier,
@@ -2404,7 +2508,7 @@ class TestProcessApi(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
assert response.json is not None
@@ -2421,7 +2525,7 @@ class TestProcessApi(BaseTest):
assert process_instance.status == "user_input_required"
client.post(
- f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/suspend",
+ f"/v1.0/process-instance-suspend/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
process_instance = ProcessInstanceService().get_process_instance(
@@ -2429,15 +2533,25 @@ class TestProcessApi(BaseTest):
)
assert process_instance.status == "suspended"
- # TODO: Why can I run a suspended process instance?
response = client.post(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
+ process_instance = ProcessInstanceService().get_process_instance(
+ process_instance_id
+ )
+ assert process_instance.status == "suspended"
+ assert response.status_code == 400
- # task = response.json['next_task']
-
- print("test_process_instance_suspend")
+ response = client.post(
+ f"/v1.0/process-instance-resume/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert response.status_code == 200
+ process_instance = ProcessInstanceService().get_process_instance(
+ process_instance_id
+ )
+ assert process_instance.status == "waiting"
def test_script_unit_test_run(
self,
@@ -2497,6 +2611,148 @@ class TestProcessApi(BaseTest):
print("test_script_unit_test_run")
+ def test_send_event(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_send_event."""
+ process_group_id = "test_group"
+ process_model_id = "process_navigation"
+ bpmn_file_name = "process_navigation.bpmn"
+ bpmn_file_location = "process_navigation"
+ process_model_identifier = self.create_group_and_model_with_bpmn(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, bpmn_file_location
+ )
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_location=process_model_identifier,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
+
+ headers = self.logged_in_headers(with_super_admin_user)
+ response = self.create_process_instance_from_process_model_id_with_api(
+ client, process_model_identifier, headers
+ )
+ process_instance_id = response.json["id"]
+
+ client.post(
+ f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+
+ # This is exactly the same as the test above, but for some reason the type checker resolves `data` to a totally irrelevant type, hence the explicit Dict annotation.
+ data: Dict = {
+ "correlation_properties": [],
+ "expression": None,
+ "external": True,
+ "internal": False,
+ "payload": {"message": "message 1"},
+ "name": "Message 1",
+ "typename": "MessageEventDefinition",
+ }
+ response = client.post(
+ f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(data),
+ )
+ assert response.json["status"] == "complete"
+
+ response = client.get(
+ f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?all_tasks=true",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert response.status_code == 200
+ end = next(task for task in response.json if task["name"] == "End")
+ assert end["data"]["result"] == {"message": "message 1"}
+
+ def test_manual_complete_task(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_manual_complete_task."""
+ process_group_id = "test_group"
+ process_model_id = "process_navigation"
+ bpmn_file_name = "process_navigation.bpmn"
+ bpmn_file_location = "process_navigation"
+ process_model_identifier = self.create_group_and_model_with_bpmn(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, bpmn_file_location
+ )
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_location=process_model_identifier,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
+
+ headers = self.logged_in_headers(with_super_admin_user)
+ response = self.create_process_instance_from_process_model_id_with_api(
+ client, process_model_identifier, headers
+ )
+ process_instance_id = response.json["id"]
+
+ client.post(
+ f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+
+ data = {
+ "dateTime": "timedelta(hours=1)",
+ "external": True,
+ "internal": True,
+ "label": "Event_0e4owa3",
+ "typename": "TimerEventDefinition",
+ }
+ response = client.post(
+ f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(data),
+ )
+
+ response = client.get(
+ f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert len(response.json) == 1
+ task = response.json[0]
+
+ response = client.post(
+ f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ )
+ assert response.json["status"] == "suspended"
+
def setup_initial_groups_for_move_tests(
self, client: FlaskClient, with_super_admin_user: UserModel
) -> None:
@@ -2740,7 +2996,9 @@ class TestProcessApi(BaseTest):
) -> None:
"""Test_can_get_process_instance_list_with_report_metadata."""
process_model = load_test_spec(
- process_model_id="save_process_instance_metadata/save_process_instance_metadata",
+ process_model_id=(
+ "save_process_instance_metadata/save_process_instance_metadata"
+ ),
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
@@ -2797,7 +3055,9 @@ class TestProcessApi(BaseTest):
) -> None:
"""Test_can_get_process_instance_list_with_report_metadata."""
process_model = load_test_spec(
- process_model_id="save_process_instance_metadata/save_process_instance_metadata",
+ process_model_id=(
+ "save_process_instance_metadata/save_process_instance_metadata"
+ ),
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
@@ -2918,3 +3178,31 @@ class TestProcessApi(BaseTest):
assert len(response.json["results"]) == 2
assert response.json["results"][1]["id"] == process_instance_one.id
assert response.json["results"][0]["id"] == process_instance_two.id
+
+ def test_process_data_show(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_process_data_show."""
+ process_model = load_test_spec(
+ "test_group/data_object_test",
+ process_model_source_directory="data_object_test",
+ )
+ process_instance_one = self.create_process_instance_from_process_model(
+ process_model
+ )
+ processor = ProcessInstanceProcessor(process_instance_one)
+ processor.do_engine_steps(save=True)
+ assert process_instance_one.status == "user_input_required"
+
+ response = client.get(
+ f"/v1.0/process-data/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance_one.id}/the_data_object_var",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+
+ assert response.status_code == 200
+ assert response.json is not None
+ assert response.json["process_data_value"] == "hey"
diff --git a/tests/spiffworkflow_backend/integration/test_process_instances_controller.py b/tests/spiffworkflow_backend/integration/test_process_instances_controller.py
new file mode 100644
index 000000000..8cb1768a6
--- /dev/null
+++ b/tests/spiffworkflow_backend/integration/test_process_instances_controller.py
@@ -0,0 +1,61 @@
+"""Test_process_instances_controller."""
+from flask.app import Flask
+from flask.testing import FlaskClient
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+
+from spiffworkflow_backend.models.user import UserModel
+
+
+class TestProcessInstancesController(BaseTest):
+ """TestProcessInstancesController."""
+
+ def test_find_by_id(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_find_by_id."""
+ user_one = self.create_user_with_permission(
+ username="user_one", target_uri="/process-instances/find-by-id/*"
+ )
+ user_two = self.create_user_with_permission(
+ username="user_two", target_uri="/process-instances/find-by-id/*"
+ )
+
+ process_model = load_test_spec(
+ process_model_id="group/sample",
+ bpmn_file_name="sample.bpmn",
+ process_model_source_directory="sample",
+ )
+ process_instance = self.create_process_instance_from_process_model(
+ process_model=process_model, user=user_one
+ )
+
+ response = client.get(
+ f"/v1.0/process-instances/find-by-id/{process_instance.id}",
+ headers=self.logged_in_headers(user_one),
+ )
+ assert response.status_code == 200
+ assert response.json
+ assert "process_instance" in response.json
+ assert response.json["process_instance"]["id"] == process_instance.id
+ assert response.json["uri_type"] == "for-me"
+
+ response = client.get(
+ f"/v1.0/process-instances/find-by-id/{process_instance.id}",
+ headers=self.logged_in_headers(user_two),
+ )
+ assert response.status_code == 400
+
+ response = client.get(
+ f"/v1.0/process-instances/find-by-id/{process_instance.id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert response.status_code == 200
+ assert response.json
+ assert "process_instance" in response.json
+ assert response.json["process_instance"]["id"] == process_instance.id
+ assert response.json["uri_type"] is None
diff --git a/tests/spiffworkflow_backend/integration/test_users_controller.py b/tests/spiffworkflow_backend/integration/test_users_controller.py
new file mode 100644
index 000000000..c1c62705f
--- /dev/null
+++ b/tests/spiffworkflow_backend/integration/test_users_controller.py
@@ -0,0 +1,47 @@
+"""Test_users_controller."""
+from flask.app import Flask
+from flask.testing import FlaskClient
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+from spiffworkflow_backend.models.user import UserModel
+
+
+class TestUsersController(BaseTest):
+ """TestUsersController."""
+
+ def test_user_search_returns_a_user(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_user_search_returns_a_user."""
+ self.find_or_create_user(username="aa")
+ self.find_or_create_user(username="ab")
+ self.find_or_create_user(username="abc")
+ self.find_or_create_user(username="ac")
+
+ self._assert_search_has_count(client, with_super_admin_user, "aa", 1)
+ self._assert_search_has_count(client, with_super_admin_user, "ab", 2)
+ self._assert_search_has_count(client, with_super_admin_user, "ac", 1)
+ self._assert_search_has_count(client, with_super_admin_user, "ad", 0)
+ self._assert_search_has_count(client, with_super_admin_user, "a", 4)
+
+ def _assert_search_has_count(
+ self,
+ client: FlaskClient,
+ with_super_admin_user: UserModel,
+ username_prefix: str,
+ expected_count: int,
+ ) -> None:
+ """_assert_search_has_count."""
+ response = client.get(
+ f"/v1.0/users/search?username_prefix={username_prefix}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert response.status_code == 200
+ assert response.json
+ assert response.json["users"] is not None
+ assert response.json["username_prefix"] == username_prefix
+ assert len(response.json["users"]) == expected_count
diff --git a/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py b/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py
new file mode 100644
index 000000000..cbf625168
--- /dev/null
+++ b/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py
@@ -0,0 +1,60 @@
+"""Test_get_all_permissions."""
+from flask.app import Flask
+from flask.testing import FlaskClient
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+from spiffworkflow_backend.models.script_attributes_context import (
+ ScriptAttributesContext,
+)
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.scripts.get_all_permissions import GetAllPermissions
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
+
+
+class TestGetAllPermissions(BaseTest):
+ """TestGetAllPermissions."""
+
+ def test_can_get_all_permissions(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_can_get_all_permissions."""
+ self.find_or_create_user("test_user")
+
+    # set up a couple of permissions, then read them all back via the script
+ script_attributes_context = ScriptAttributesContext(
+ task=None,
+ environment_identifier="testing",
+ process_instance_id=1,
+ process_model_identifier="my_test_user",
+ )
+ AuthorizationService.add_permission_from_uri_or_macro(
+ permission="start", target="PG:hey:group", group_identifier="my_test_group"
+ )
+ AuthorizationService.add_permission_from_uri_or_macro(
+ permission="all", target="/tasks", group_identifier="my_test_group"
+ )
+
+ expected_permissions = [
+ {
+ "group_identifier": "my_test_group",
+ "uri": "/process-instances/hey:group:*",
+ "permissions": ["create"],
+ },
+ {
+ "group_identifier": "my_test_group",
+ "uri": "/process-instances/for-me/hey:group:*",
+ "permissions": ["read"],
+ },
+ {
+ "group_identifier": "my_test_group",
+ "uri": "/tasks",
+ "permissions": ["create", "read", "update", "delete"],
+ },
+ ]
+
+ permissions = GetAllPermissions().run(script_attributes_context)
+ assert permissions == expected_permissions
diff --git a/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/tests/spiffworkflow_backend/scripts/test_get_localtime.py
index f1834ab3a..90e4158da 100644
--- a/tests/spiffworkflow_backend/scripts/test_get_localtime.py
+++ b/tests/spiffworkflow_backend/scripts/test_get_localtime.py
@@ -68,9 +68,9 @@ class TestGetLocaltime(BaseTest):
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
- active_task = process_instance.active_tasks[0]
+ human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
ProcessInstanceService.complete_form_task(
@@ -78,12 +78,12 @@ class TestGetLocaltime(BaseTest):
spiff_task,
{"timezone": "US/Pacific"},
initiator_user,
- active_task,
+ human_task,
)
- active_task = process_instance.active_tasks[0]
+ human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
assert spiff_task
diff --git a/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py b/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py
new file mode 100644
index 000000000..67cf55c85
--- /dev/null
+++ b/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py
@@ -0,0 +1,50 @@
+"""Test_refresh_permissions."""
+import pytest
+from flask.app import Flask
+from flask.testing import FlaskClient
+from flask_bpmn.api.api_error import ApiError
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+
+from spiffworkflow_backend.services.process_instance_processor import (
+ ProcessInstanceProcessor,
+)
+
+
+class TestRefreshPermissions(BaseTest):
+ """TestRefreshPermissions."""
+
+ def test_refresh_permissions_requires_elevated_permission(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_refresh_permissions_requires_elevated_permission."""
+ basic_user = self.find_or_create_user("basic_user")
+ privileged_user = self.find_or_create_user("privileged_user")
+ self.add_permissions_to_user(
+ privileged_user,
+ target_uri="/can-run-privileged-script/refresh_permissions",
+ permission_names=["create"],
+ )
+ process_model = load_test_spec(
+ process_model_id="refresh_permissions",
+ process_model_source_directory="script_refresh_permissions",
+ )
+ process_instance = self.create_process_instance_from_process_model(
+ process_model=process_model, user=basic_user
+ )
+
+ processor = ProcessInstanceProcessor(process_instance)
+
+ with pytest.raises(ApiError) as exception:
+ processor.do_engine_steps(save=True)
+        assert "ScriptUnauthorizedForUserError" in str(exception.value)
+
+ process_instance = self.create_process_instance_from_process_model(
+ process_model=process_model, user=privileged_user
+ )
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.do_engine_steps(save=True)
+ assert process_instance.status == "complete"
diff --git a/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py b/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py
index 96eb62970..738896cd7 100644
--- a/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py
+++ b/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py
@@ -24,17 +24,18 @@ class TestSaveProcessInstanceMetadata(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_can_save_process_instance_metadata."""
- initiator_user = self.find_or_create_user("initiator_user")
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
process_model = load_test_spec(
- process_model_id="save_process_instance_metadata/save_process_instance_metadata",
+ process_model_id=(
+ "save_process_instance_metadata/save_process_instance_metadata"
+ ),
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
process_instance = self.create_process_instance_from_process_model(
- process_model=process_model, user=initiator_user
+ process_model=process_model, user=with_super_admin_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
diff --git a/tests/spiffworkflow_backend/unit/test_authorization_service.py b/tests/spiffworkflow_backend/unit/test_authorization_service.py
index 00622a1f7..9e7af5d0f 100644
--- a/tests/spiffworkflow_backend/unit/test_authorization_service.py
+++ b/tests/spiffworkflow_backend/unit/test_authorization_service.py
@@ -4,9 +4,12 @@ from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.authorization_service import InvalidPermissionError
+from spiffworkflow_backend.services.group_service import GroupService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -14,6 +17,7 @@ from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.user_service import UserService
class TestAuthorizationService(BaseTest):
@@ -90,14 +94,14 @@ class TestAuthorizationService(BaseTest):
users["testuser2"], "read", "/v1.0/process-groups/"
)
- def test_user_can_be_added_to_active_task_on_first_login(
+ def test_user_can_be_added_to_human_task_on_first_login(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
- """Test_user_can_be_added_to_active_task_on_first_login."""
+ """Test_user_can_be_added_to_human_task_on_first_login."""
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.principal is not None
# to ensure there is a user that can be assigned to the task
@@ -121,21 +125,295 @@ class TestAuthorizationService(BaseTest):
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
- active_task = process_instance.active_tasks[0]
+ human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
- active_task = process_instance.active_tasks[0]
+ human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
finance_user = AuthorizationService.create_user_from_sign_in(
- {"username": "testuser2", "sub": "open_id"}
+ {
+ "username": "testuser2",
+ "sub": "testuser2",
+ "iss": "https://test.stuff",
+ "email": "testuser2",
+ }
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, finance_user, active_task
+ processor, spiff_task, {}, finance_user, human_task
)
+
+ def test_explode_permissions_all_on_process_group(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_all_on_process_group."""
+ expected_permissions = [
+ ("/logs/some-process-group:some-process-model:*", "read"),
+ ("/process-data/some-process-group:some-process-model:*", "read"),
+ ("/process-groups/some-process-group:some-process-model:*", "create"),
+ ("/process-groups/some-process-group:some-process-model:*", "delete"),
+ ("/process-groups/some-process-group:some-process-model:*", "read"),
+ ("/process-groups/some-process-group:some-process-model:*", "update"),
+ (
+ "/process-instance-suspend/some-process-group:some-process-model:*",
+ "create",
+ ),
+ (
+ "/process-instance-terminate/some-process-group:some-process-model:*",
+ "create",
+ ),
+ ("/process-instances/some-process-group:some-process-model:*", "create"),
+ ("/process-instances/some-process-group:some-process-model:*", "delete"),
+ ("/process-instances/some-process-group:some-process-model:*", "read"),
+ ("/process-models/some-process-group:some-process-model:*", "create"),
+ ("/process-models/some-process-group:some-process-model:*", "delete"),
+ ("/process-models/some-process-group:some-process-model:*", "read"),
+ ("/process-models/some-process-group:some-process-model:*", "update"),
+ ("/task-data/some-process-group:some-process-model:*", "read"),
+ ("/task-data/some-process-group:some-process-model:*", "update"),
+ ]
+ permissions_to_assign = AuthorizationService.explode_permissions(
+ "all", "PG:/some-process-group/some-process-model"
+ )
+ permissions_to_assign_tuples = sorted(
+ [(p.target_uri, p.permission) for p in permissions_to_assign]
+ )
+ assert permissions_to_assign_tuples == expected_permissions
+
+ def test_explode_permissions_start_on_process_group(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_start_on_process_group."""
+ expected_permissions = [
+ (
+ "/process-instances/for-me/some-process-group:some-process-model:*",
+ "read",
+ ),
+ ("/process-instances/some-process-group:some-process-model:*", "create"),
+ ]
+ permissions_to_assign = AuthorizationService.explode_permissions(
+ "start", "PG:/some-process-group/some-process-model"
+ )
+ permissions_to_assign_tuples = sorted(
+ [(p.target_uri, p.permission) for p in permissions_to_assign]
+ )
+ assert permissions_to_assign_tuples == expected_permissions
+
+ def test_explode_permissions_all_on_process_model(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_all_on_process_model."""
+ expected_permissions = [
+ ("/logs/some-process-group:some-process-model/*", "read"),
+ ("/process-data/some-process-group:some-process-model/*", "read"),
+ (
+ "/process-instance-suspend/some-process-group:some-process-model/*",
+ "create",
+ ),
+ (
+ "/process-instance-terminate/some-process-group:some-process-model/*",
+ "create",
+ ),
+ ("/process-instances/some-process-group:some-process-model/*", "create"),
+ ("/process-instances/some-process-group:some-process-model/*", "delete"),
+ ("/process-instances/some-process-group:some-process-model/*", "read"),
+ ("/process-models/some-process-group:some-process-model/*", "create"),
+ ("/process-models/some-process-group:some-process-model/*", "delete"),
+ ("/process-models/some-process-group:some-process-model/*", "read"),
+ ("/process-models/some-process-group:some-process-model/*", "update"),
+ ("/task-data/some-process-group:some-process-model/*", "read"),
+ ("/task-data/some-process-group:some-process-model/*", "update"),
+ ]
+ permissions_to_assign = AuthorizationService.explode_permissions(
+ "all", "PM:/some-process-group/some-process-model"
+ )
+ permissions_to_assign_tuples = sorted(
+ [(p.target_uri, p.permission) for p in permissions_to_assign]
+ )
+ assert permissions_to_assign_tuples == expected_permissions
+
+ def test_explode_permissions_start_on_process_model(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_start_on_process_model."""
+ expected_permissions = [
+ (
+ "/process-instances/for-me/some-process-group:some-process-model/*",
+ "read",
+ ),
+ ("/process-instances/some-process-group:some-process-model/*", "create"),
+ ]
+ permissions_to_assign = AuthorizationService.explode_permissions(
+ "start", "PM:/some-process-group/some-process-model"
+ )
+ permissions_to_assign_tuples = sorted(
+ [(p.target_uri, p.permission) for p in permissions_to_assign]
+ )
+ assert permissions_to_assign_tuples == expected_permissions
+
+ def test_explode_permissions_basic(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_basic."""
+ expected_permissions = [
+ ("/process-instances/find-by-id/*", "read"),
+ ("/process-instances/for-me", "read"),
+ ("/process-instances/reports/*", "create"),
+ ("/process-instances/reports/*", "delete"),
+ ("/process-instances/reports/*", "read"),
+ ("/process-instances/reports/*", "update"),
+ ("/processes", "read"),
+ ("/service-tasks", "read"),
+ ("/tasks/*", "create"),
+ ("/tasks/*", "delete"),
+ ("/tasks/*", "read"),
+ ("/tasks/*", "update"),
+ ("/user-groups/for-current-user", "read"),
+ ]
+ permissions_to_assign = AuthorizationService.explode_permissions("all", "BASIC")
+ permissions_to_assign_tuples = sorted(
+ [(p.target_uri, p.permission) for p in permissions_to_assign]
+ )
+ assert permissions_to_assign_tuples == expected_permissions
+
+ def test_explode_permissions_all(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_all."""
+ expected_permissions = [
+ ("/*", "create"),
+ ("/*", "delete"),
+ ("/*", "read"),
+ ("/*", "update"),
+ ]
+ permissions_to_assign = AuthorizationService.explode_permissions("all", "ALL")
+ permissions_to_assign_tuples = sorted(
+ [(p.target_uri, p.permission) for p in permissions_to_assign]
+ )
+ assert permissions_to_assign_tuples == expected_permissions
+
+ def test_explode_permissions_with_target_uri(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_with_target_uri."""
+ expected_permissions = [
+ ("/hey/model", "create"),
+ ("/hey/model", "delete"),
+ ("/hey/model", "read"),
+ ("/hey/model", "update"),
+ ]
+ permissions_to_assign = AuthorizationService.explode_permissions(
+ "all", "/hey/model"
+ )
+ permissions_to_assign_tuples = sorted(
+ [(p.target_uri, p.permission) for p in permissions_to_assign]
+ )
+ assert permissions_to_assign_tuples == expected_permissions
+
+ def test_granting_access_to_group_gives_access_to_group_and_subgroups(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_granting_access_to_group_gives_access_to_group_and_subgroups."""
+ user = self.find_or_create_user(username="user_one")
+ user_group = GroupService.find_or_create_group("group_one")
+ UserService.add_user_to_group(user, user_group)
+ AuthorizationService.add_permission_from_uri_or_macro(
+ user_group.identifier, "read", "PG:hey"
+ )
+ self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
+ self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
+
+ def test_explode_permissions_with_invalid_target_uri(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_with_invalid_target_uri."""
+ with pytest.raises(InvalidPermissionError):
+ AuthorizationService.explode_permissions("all", "BAD_MACRO")
+
+ def test_explode_permissions_with_start_to_incorrect_target(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_explode_permissions_with_start_to_incorrect_target."""
+ with pytest.raises(InvalidPermissionError):
+ AuthorizationService.explode_permissions("start", "/hey/model")
+
+ def test_can_refresh_permissions(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_can_refresh_permissions."""
+ user = self.find_or_create_user(username="user_one")
+ admin_user = self.find_or_create_user(username="testadmin1")
+
+ # this group is not mentioned so it will get deleted
+ GroupService.find_or_create_group("group_two")
+ assert GroupModel.query.filter_by(identifier="group_two").first() is not None
+
+ group_info = [
+ {
+ "users": ["user_one"],
+ "name": "group_one",
+ "permissions": [{"actions": ["create", "read"], "uri": "PG:hey"}],
+ }
+ ]
+ AuthorizationService.refresh_permissions(group_info)
+ assert GroupModel.query.filter_by(identifier="group_two").first() is None
+ assert GroupModel.query.filter_by(identifier="group_one").first() is not None
+ self.assert_user_has_permission(admin_user, "create", "/anything-they-want")
+ self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
+ self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
+ self.assert_user_has_permission(user, "create", "/v1.0/process-groups/hey:yo")
+
+ group_info = [
+ {
+ "users": ["user_one"],
+ "name": "group_one",
+ "permissions": [{"actions": ["read"], "uri": "PG:hey"}],
+ }
+ ]
+ AuthorizationService.refresh_permissions(group_info)
+ assert GroupModel.query.filter_by(identifier="group_one").first() is not None
+ self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
+ self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
+ self.assert_user_has_permission(
+ user, "create", "/v1.0/process-groups/hey:yo", expected_result=False
+ )
+ self.assert_user_has_permission(admin_user, "create", "/anything-they-want")
diff --git a/tests/spiffworkflow_backend/unit/test_dot_notation.py b/tests/spiffworkflow_backend/unit/test_dot_notation.py
index 80b052544..59a0fee8d 100644
--- a/tests/spiffworkflow_backend/unit/test_dot_notation.py
+++ b/tests/spiffworkflow_backend/unit/test_dot_notation.py
@@ -37,7 +37,7 @@ class TestDotNotation(BaseTest):
)
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance_from_process_model_id(
+ response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
process_instance_id = response.json["id"]
@@ -47,7 +47,7 @@ class TestDotNotation(BaseTest):
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
- active_task = process_instance.active_tasks[0]
+ human_task = process_instance.human_tasks[0]
user_task = processor.get_ready_user_tasks()[0]
form_data = {
@@ -58,7 +58,7 @@ class TestDotNotation(BaseTest):
"invoice.dueDate": "09/30/2022",
}
ProcessInstanceService.complete_form_task(
- processor, user_task, form_data, with_super_admin_user, active_task
+ processor, user_task, form_data, with_super_admin_user, human_task
)
expected = {
diff --git a/tests/spiffworkflow_backend/unit/test_permissions.py b/tests/spiffworkflow_backend/unit/test_permissions.py
index b66f32370..a96989697 100644
--- a/tests/spiffworkflow_backend/unit/test_permissions.py
+++ b/tests/spiffworkflow_backend/unit/test_permissions.py
@@ -16,6 +16,7 @@ from spiffworkflow_backend.services.user_service import UserService
# we think we can get the list of roles for a user.
# spiff needs a way to determine what each role allows.
+
# user role allows list and read of all process groups/models
# super-admin role allows create, update, and delete of all process groups/models
# * super-admins users maybe conventionally get the user role as well
diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
index 3e0107957..b4a650dc6 100644
--- a/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
+++ b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
@@ -31,10 +31,14 @@ class TestProcessInstanceProcessor(BaseTest):
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_script_engine_takes_data_and_returns_expected_results."""
+ app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
+ app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
script_engine = ProcessInstanceProcessor._script_engine
result = script_engine._evaluate("a", {"a": 1})
assert result == 1
+ app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
+ app.config["THREAD_LOCAL_DATA"].process_instance_id = None
def test_script_engine_can_use_custom_scripts(
self,
@@ -42,21 +46,26 @@ class TestProcessInstanceProcessor(BaseTest):
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_script_engine_takes_data_and_returns_expected_results."""
+ app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
+ app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
script_engine = ProcessInstanceProcessor._script_engine
result = script_engine._evaluate("fact_service(type='norris')", {})
assert (
result
- == "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants."
+ == "Chuck Norris doesn’t read books. He stares them down until he gets the"
+ " information he wants."
)
+ app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
+ app.config["THREAD_LOCAL_DATA"].process_instance_id = None
- def test_sets_permission_correctly_on_active_task(
+ def test_sets_permission_correctly_on_human_task(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
- """Test_sets_permission_correctly_on_active_task."""
+ """Test_sets_permission_correctly_on_human_task."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
@@ -80,63 +89,63 @@ class TestProcessInstanceProcessor(BaseTest):
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
- assert active_task.lane_assignment_id is None
- assert len(active_task.potential_owners) == 1
- assert active_task.potential_owners[0] == initiator_user
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
+ assert human_task.lane_assignment_id is None
+ assert len(human_task.potential_owners) == 1
+ assert human_task.potential_owners[0] == initiator_user
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
with pytest.raises(UserDoesNotHaveAccessToTaskError):
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, finance_user, active_task
+ processor, spiff_task, {}, finance_user, human_task
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
- assert active_task.lane_assignment_id == finance_group.id
- assert len(active_task.potential_owners) == 1
- assert active_task.potential_owners[0] == finance_user
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
+ assert human_task.lane_assignment_id == finance_group.id
+ assert len(human_task.potential_owners) == 1
+ assert human_task.potential_owners[0] == finance_user
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
with pytest.raises(UserDoesNotHaveAccessToTaskError):
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, finance_user, active_task
+ processor, spiff_task, {}, finance_user, human_task
)
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
- assert active_task.lane_assignment_id is None
- assert len(active_task.potential_owners) == 1
- assert active_task.potential_owners[0] == initiator_user
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
+ assert human_task.lane_assignment_id is None
+ assert len(human_task.potential_owners) == 1
+ assert human_task.potential_owners[0] == initiator_user
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
assert process_instance.status == ProcessInstanceStatus.complete.value
- def test_sets_permission_correctly_on_active_task_when_using_dict(
+ def test_sets_permission_correctly_on_human_task_when_using_dict(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
- """Test_sets_permission_correctly_on_active_task_when_using_dict."""
+ """Test_sets_permission_correctly_on_human_task_when_using_dict."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
@@ -163,94 +172,97 @@ class TestProcessInstanceProcessor(BaseTest):
processor.do_engine_steps(save=True)
processor.save()
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
- assert active_task.lane_assignment_id is None
- assert len(active_task.potential_owners) == 1
- assert active_task.potential_owners[0] == initiator_user
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
+ assert human_task.lane_assignment_id is None
+ assert len(human_task.potential_owners) == 1
+ assert human_task.potential_owners[0] == initiator_user
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
with pytest.raises(UserDoesNotHaveAccessToTaskError):
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, finance_user_three, active_task
+ processor, spiff_task, {}, finance_user_three, human_task
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
+ assert human_task.completed_by_user_id == initiator_user.id
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
- assert active_task.lane_assignment_id is None
- assert len(active_task.potential_owners) == 2
- assert active_task.potential_owners == [finance_user_three, finance_user_four]
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
+ assert human_task.lane_assignment_id is None
+ assert len(human_task.potential_owners) == 2
+ assert human_task.potential_owners == [finance_user_three, finance_user_four]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
with pytest.raises(UserDoesNotHaveAccessToTaskError):
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
g.user = finance_user_three
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, finance_user_three, active_task
+ processor, spiff_task, {}, finance_user_three, human_task
)
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
- assert active_task.lane_assignment_id is None
- assert len(active_task.potential_owners) == 1
- assert active_task.potential_owners[0] == finance_user_four
+ assert human_task.completed_by_user_id == finance_user_three.id
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
+ assert human_task.lane_assignment_id is None
+ assert len(human_task.potential_owners) == 1
+ assert human_task.potential_owners[0] == finance_user_four
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
with pytest.raises(UserDoesNotHaveAccessToTaskError):
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, finance_user_four, active_task
+ processor, spiff_task, {}, finance_user_four, human_task
)
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
- assert active_task.lane_assignment_id is None
- assert len(active_task.potential_owners) == 1
- assert active_task.potential_owners[0] == initiator_user
+ assert human_task.completed_by_user_id == finance_user_four.id
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
+ assert human_task.lane_assignment_id is None
+ assert len(human_task.potential_owners) == 1
+ assert human_task.potential_owners[0] == initiator_user
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
- assert len(process_instance.active_tasks) == 1
- active_task = process_instance.active_tasks[0]
+ assert len(process_instance.active_human_tasks) == 1
+ human_task = process_instance.active_human_tasks[0]
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- active_task.task_name, processor.bpmn_process_instance
+ human_task.task_name, processor.bpmn_process_instance
)
with pytest.raises(UserDoesNotHaveAccessToTaskError):
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, initiator_user, active_task
+ processor, spiff_task, {}, initiator_user, human_task
)
ProcessInstanceService.complete_form_task(
- processor, spiff_task, {}, testadmin1, active_task
+ processor, spiff_task, {}, testadmin1, human_task
)
assert process_instance.status == ProcessInstanceStatus.complete.value
- def test_does_not_recreate_active_tasks_on_multiple_saves(
+ def test_does_not_recreate_human_tasks_on_multiple_saves(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
- """Test_sets_permission_correctly_on_active_task_when_using_dict."""
+ """Test_does_not_recreate_human_tasks_on_multiple_saves."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
@@ -273,11 +285,11 @@ class TestProcessInstanceProcessor(BaseTest):
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
- assert len(process_instance.active_tasks) == 1
- initial_active_task_id = process_instance.active_tasks[0].id
+ assert len(process_instance.active_human_tasks) == 1
+ initial_human_task_id = process_instance.active_human_tasks[0].id
- # save again to ensure we go attempt to process the active tasks again
+ # save again to ensure we attempt to process the human tasks again
processor.save()
- assert len(process_instance.active_tasks) == 1
- assert initial_active_task_id == process_instance.active_tasks[0].id
+ assert len(process_instance.active_human_tasks) == 1
+ assert initial_human_task_id == process_instance.active_human_tasks[0].id
diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py b/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py
index 98412faa3..b40412ff8 100644
--- a/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py
+++ b/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py
@@ -3,8 +3,12 @@ from typing import Optional
from flask import Flask
from flask.testing import FlaskClient
+from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@@ -15,6 +19,7 @@ from spiffworkflow_backend.services.process_instance_report_service import (
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportService,
)
+from spiffworkflow_backend.services.user_service import UserService
class TestProcessInstanceReportFilter(BaseTest):
@@ -122,13 +127,13 @@ class TestProcessInstanceReportService(BaseTest):
report_metadata=report_metadata,
)
return ProcessInstanceReportService.filter_from_metadata_with_overrides(
- report,
- process_model_identifier,
- start_from,
- start_to,
- end_from,
- end_to,
- process_status,
+ process_instance_report=report,
+ process_model_identifier=process_model_identifier,
+ start_from=start_from,
+ start_to=start_to,
+ end_from=end_from,
+ end_to=end_to,
+ process_status=process_status,
)
def _filter_by_dict_from_metadata(self, report_metadata: dict) -> dict[str, str]:
@@ -743,3 +748,387 @@ class TestProcessInstanceReportService(BaseTest):
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["sue"]
+
+ def test_can_filter_by_completed_instances_initiated_by_me(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_can_filter_by_completed_instances_initiated_by_me."""
+ process_model_id = "runs_without_input/sample"
+ bpmn_file_location = "sample"
+ process_model = load_test_spec(
+ process_model_id,
+ process_model_source_directory=bpmn_file_location,
+ )
+ user_one = self.find_or_create_user(username="user_one")
+ user_two = self.find_or_create_user(username="user_two")
+
+ # Several processes to ensure they do not return in the result
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="waiting", user=user_one
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+
+ process_instance_report = ProcessInstanceReportService.report_with_identifier(
+ user=user_one,
+ report_identifier="system_report_completed_instances_initiated_by_me",
+ )
+ report_filter = (
+ ProcessInstanceReportService.filter_from_metadata_with_overrides(
+ process_instance_report=process_instance_report,
+ process_model_identifier=process_model.id,
+ )
+ )
+ response_json = ProcessInstanceReportService.run_process_instance_report(
+ report_filter=report_filter,
+ process_instance_report=process_instance_report,
+ user=user_one,
+ )
+
+ assert len(response_json["results"]) == 2
+ assert response_json["results"][0]["process_initiator_id"] == user_one.id
+ assert response_json["results"][1]["process_initiator_id"] == user_one.id
+ assert response_json["results"][0]["status"] == "complete"
+ assert response_json["results"][1]["status"] == "complete"
+
+ def test_can_filter_by_completed_instances_with_tasks_completed_by_me(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_can_filter_by_completed_instances_with_tasks_completed_by_me."""
+ process_model_id = "runs_without_input/sample"
+ bpmn_file_location = "sample"
+ process_model = load_test_spec(
+ process_model_id,
+ process_model_source_directory=bpmn_file_location,
+ )
+ user_one = self.find_or_create_user(username="user_one")
+ user_two = self.find_or_create_user(username="user_two")
+
+ # Several processes to ensure they do not return in the result
+ process_instance_created_by_user_one_one = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ process_instance_created_by_user_one_three = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="waiting", user=user_one
+ )
+ )
+ process_instance_created_by_user_two_one = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="waiting", user=user_two
+ )
+
+ human_task_for_user_one_one = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_one.id,
+ completed_by_user_id=user_one.id,
+ )
+ human_task_for_user_one_two = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_two_one.id,
+ completed_by_user_id=user_one.id,
+ )
+ human_task_for_user_one_three = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_three.id,
+ completed_by_user_id=user_one.id,
+ )
+ human_task_for_user_two_one = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_one.id,
+ completed_by_user_id=user_two.id,
+ )
+ human_task_for_user_two_two = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_two_one.id,
+ completed_by_user_id=user_two.id,
+ )
+ human_task_for_user_two_three = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_three.id,
+ completed_by_user_id=user_two.id,
+ )
+ db.session.add(human_task_for_user_one_one)
+ db.session.add(human_task_for_user_one_two)
+ db.session.add(human_task_for_user_one_three)
+ db.session.add(human_task_for_user_two_one)
+ db.session.add(human_task_for_user_two_two)
+ db.session.add(human_task_for_user_two_three)
+ db.session.commit()
+
+ process_instance_report = ProcessInstanceReportService.report_with_identifier(
+ user=user_one,
+ report_identifier=(
+ "system_report_completed_instances_with_tasks_completed_by_me"
+ ),
+ )
+ report_filter = (
+ ProcessInstanceReportService.filter_from_metadata_with_overrides(
+ process_instance_report=process_instance_report,
+ process_model_identifier=process_model.id,
+ )
+ )
+ response_json = ProcessInstanceReportService.run_process_instance_report(
+ report_filter=report_filter,
+ process_instance_report=process_instance_report,
+ user=user_one,
+ )
+
+ assert len(response_json["results"]) == 1
+ assert response_json["results"][0]["process_initiator_id"] == user_two.id
+ assert (
+ response_json["results"][0]["id"]
+ == process_instance_created_by_user_two_one.id
+ )
+ assert response_json["results"][0]["status"] == "complete"
+
+ def test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups."""
+ process_model_id = "runs_without_input/sample"
+ bpmn_file_location = "sample"
+ process_model = load_test_spec(
+ process_model_id,
+ process_model_source_directory=bpmn_file_location,
+ )
+ user_group_one = GroupModel(identifier="group_one")
+ user_group_two = GroupModel(identifier="group_two")
+ db.session.add(user_group_one)
+ db.session.add(user_group_two)
+ db.session.commit()
+
+ user_one = self.find_or_create_user(username="user_one")
+ user_two = self.find_or_create_user(username="user_two")
+ user_three = self.find_or_create_user(username="user_three")
+ UserService.add_user_to_group(user_one, user_group_one)
+ UserService.add_user_to_group(user_two, user_group_one)
+ UserService.add_user_to_group(user_three, user_group_two)
+
+ # Several processes to ensure they do not return in the result
+ process_instance_created_by_user_one_one = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ process_instance_created_by_user_one_three = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="waiting", user=user_one
+ )
+ )
+ process_instance_created_by_user_two_one = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="waiting", user=user_two
+ )
+
+ human_task_for_user_group_one_one = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_one.id,
+ lane_assignment_id=user_group_one.id,
+ )
+ human_task_for_user_group_one_two = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_three.id,
+ lane_assignment_id=user_group_one.id,
+ )
+ human_task_for_user_group_one_three = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_two_one.id,
+ lane_assignment_id=user_group_one.id,
+ )
+ human_task_for_user_group_two_one = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_two_one.id,
+ lane_assignment_id=user_group_two.id,
+ )
+ human_task_for_user_group_two_two = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_one.id,
+ lane_assignment_id=user_group_two.id,
+ )
+ db.session.add(human_task_for_user_group_one_one)
+ db.session.add(human_task_for_user_group_one_two)
+ db.session.add(human_task_for_user_group_one_three)
+ db.session.add(human_task_for_user_group_two_one)
+ db.session.add(human_task_for_user_group_two_two)
+ db.session.commit()
+
+ process_instance_report = ProcessInstanceReportService.report_with_identifier(
+ user=user_one,
+ report_identifier=(
+ "system_report_completed_instances_with_tasks_completed_by_my_groups"
+ ),
+ )
+ report_filter = (
+ ProcessInstanceReportService.filter_from_metadata_with_overrides(
+ process_instance_report=process_instance_report,
+ process_model_identifier=process_model.id,
+ )
+ )
+ response_json = ProcessInstanceReportService.run_process_instance_report(
+ report_filter=report_filter,
+ process_instance_report=process_instance_report,
+ user=user_one,
+ )
+
+ assert len(response_json["results"]) == 2
+ assert response_json["results"][0]["process_initiator_id"] == user_two.id
+ assert (
+ response_json["results"][0]["id"]
+ == process_instance_created_by_user_two_one.id
+ )
+ assert response_json["results"][0]["status"] == "complete"
+ assert response_json["results"][1]["process_initiator_id"] == user_one.id
+ assert (
+ response_json["results"][1]["id"]
+ == process_instance_created_by_user_one_one.id
+ )
+ assert response_json["results"][1]["status"] == "complete"
+
+ def test_can_filter_by_with_relation_to_me(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_can_filter_by_with_relation_to_me."""
+ process_model_id = "runs_without_input/sample"
+ bpmn_file_location = "sample"
+ process_model = load_test_spec(
+ process_model_id,
+ process_model_source_directory=bpmn_file_location,
+ )
+ user_group_one = GroupModel(identifier="group_one")
+ user_group_two = GroupModel(identifier="group_two")
+ db.session.add(user_group_one)
+ db.session.add(user_group_two)
+ db.session.commit()
+
+ user_one = self.find_or_create_user(username="user_one")
+ user_two = self.find_or_create_user(username="user_two")
+ user_three = self.find_or_create_user(username="user_three")
+ UserService.add_user_to_group(user_one, user_group_one)
+ UserService.add_user_to_group(user_two, user_group_one)
+ UserService.add_user_to_group(user_three, user_group_two)
+
+ # Several processes to ensure they do not return in the result
+ process_instance_created_by_user_one_one = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ )
+ process_instance_created_by_user_one_two = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_one
+ )
+ )
+ process_instance_created_by_user_one_three = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="waiting", user=user_one
+ )
+ )
+ process_instance_created_by_user_two_one = (
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="complete", user=user_two
+ )
+ self.create_process_instance_from_process_model(
+ process_model=process_model, status="waiting", user=user_two
+ )
+
+ human_task_for_user_group_one_one = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_one.id,
+ lane_assignment_id=user_group_one.id,
+ )
+ human_task_for_user_group_one_two = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_three.id,
+ lane_assignment_id=user_group_one.id,
+ )
+ human_task_for_user_group_one_three = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_two_one.id,
+ lane_assignment_id=user_group_one.id,
+ )
+ human_task_for_user_group_two_one = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_two_one.id,
+ lane_assignment_id=user_group_two.id,
+ )
+ human_task_for_user_group_two_two = HumanTaskModel(
+ process_instance_id=process_instance_created_by_user_one_one.id,
+ lane_assignment_id=user_group_two.id,
+ )
+ db.session.add(human_task_for_user_group_one_one)
+ db.session.add(human_task_for_user_group_one_two)
+ db.session.add(human_task_for_user_group_one_three)
+ db.session.add(human_task_for_user_group_two_one)
+ db.session.add(human_task_for_user_group_two_two)
+ db.session.commit()
+
+ UserService.add_user_to_human_tasks_if_appropriate(user_one)
+
+ process_instance_report = ProcessInstanceReportService.report_with_identifier(
+ user=user_one
+ )
+ report_filter = (
+ ProcessInstanceReportService.filter_from_metadata_with_overrides(
+ process_instance_report=process_instance_report,
+ process_model_identifier=process_model.id,
+ with_relation_to_me=True,
+ )
+ )
+ response_json = ProcessInstanceReportService.run_process_instance_report(
+ report_filter=report_filter,
+ process_instance_report=process_instance_report,
+ user=user_one,
+ )
+
+ assert len(response_json["results"]) == 4
+ process_instance_ids_in_results = [r["id"] for r in response_json["results"]]
+ assert (
+ process_instance_created_by_user_one_one.id
+ in process_instance_ids_in_results
+ )
+ assert (
+ process_instance_created_by_user_one_two.id
+ in process_instance_ids_in_results
+ )
+ assert (
+ process_instance_created_by_user_one_three.id
+ in process_instance_ids_in_results
+ )
+ assert (
+ process_instance_created_by_user_two_one.id
+ in process_instance_ids_in_results
+ )
diff --git a/tests/spiffworkflow_backend/unit/test_user_service.py b/tests/spiffworkflow_backend/unit/test_user_service.py
new file mode 100644
index 000000000..959975d5b
--- /dev/null
+++ b/tests/spiffworkflow_backend/unit/test_user_service.py
@@ -0,0 +1,54 @@
+"""Test UserService."""
+from flask.app import Flask
+from flask.testing import FlaskClient
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+from spiffworkflow_backend.models.user_group_assignment_waiting import (
+ UserGroupAssignmentWaitingModel,
+)
+from spiffworkflow_backend.services.group_service import GroupService
+from spiffworkflow_backend.services.user_service import UserService
+
+
+class TestUserService(BaseTest):
+ """TestUserService."""
+
+ def test_assigning_a_group_to_a_user_before_the_user_is_created(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_assigning_a_group_to_a_user_before_the_user_is_created."""
+ a_test_group = GroupService.find_or_create_group("aTestGroup")
+ UserService.add_waiting_group_assignment("initiator_user", a_test_group)
+ initiator_user = self.find_or_create_user("initiator_user")
+ assert initiator_user.groups[0] == a_test_group
+
+ def test_assigning_a_group_to_all_users_updates_new_users(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_assigning_a_group_to_all_users_updates_new_users."""
+ everybody_group = GroupService.find_or_create_group("everybodyGroup")
+ UserService.add_waiting_group_assignment(
+ UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group
+ )
+ initiator_user = self.find_or_create_user("initiator_user")
+ assert initiator_user.groups[0] == everybody_group
+
+ def test_assigning_a_group_to_all_users_updates_existing_users(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_assigning_a_group_to_all_users_updates_existing_users."""
+ initiator_user = self.find_or_create_user("initiator_user")
+ everybody_group = GroupService.find_or_create_group("everybodyGroup")
+ UserService.add_waiting_group_assignment(
+ UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group
+ )
+ assert initiator_user.groups[0] == everybody_group