Merge branch 'main' into feature/remove-loop-reset

commit 2ba583577a

.flake8

@@ -2,6 +2,7 @@
 select = B,B9,C,D,DAR,E,F,N,RST,S,W
 ignore = E203,E501,RST201,RST203,RST301,W503,S410,S320
 max-line-length = 120
+extend-ignore = E203
 max-complexity = 30
 docstring-convention = google
 rst-roles = class,const,func,meth,mod,ref
@@ -17,10 +18,10 @@ per-file-ignores =
     # THEN, test_hey.py will NOT be excluding D103

     # asserts are ok in tests
-    spiffworkflow-backend/tests/*:S101,D100,D101,D102,D103
+    spiffworkflow-backend/tests/*:S101,D100,D101,D102,D103,D107

     # prefer naming functions descriptively rather than forcing comments
-    spiffworkflow-backend/src/*:D100,D101,D102,D103
+    spiffworkflow-backend/src/*:D100,D101,D102,D103,D107

    spiffworkflow-backend/bin/keycloak_test_server.py:B950,D
    spiffworkflow-backend/conftest.py:S105

.pre-commit-config.yaml

@@ -12,9 +12,13 @@ repos:
        # exclude: ^migrations/
        exclude: "/migrations/"

-        # otherwise it will not fix long lines if the long lines contain long strings
+        # --preview because otherwise it will not fix long lines if the long lines contain long strings
        # https://github.com/psf/black/pull/1132
        # https://github.com/psf/black/pull/1609
+        # --line-length because then we can avoid the fancy line wrapping in more instances and jason, kb, and elizabeth
+        # kind of prefer long lines rather than cutely-formatted sets of lines.
+        # TODO: enable when its safe to update the files
+        # args: [--preview, --line-length, "110"]
        args: [--preview]

      - id: check-added-large-files

README.md

@@ -31,6 +31,18 @@ Requires at root:
 - .pre-commit-config.yaml
 - pyproject.toml

+Run cypress automated browser tests
+-----------------------------------
+
+Get the app running so you can access the frontend at http://localhost:7001 in your browser.
+
+First install nodejs, ideally the version in .tool-versions (but likely other versions will work).
+
+Then:
+
+    cd spiffworkflow-frontend
+    npm install
+    ./bin/run_cypress_tests_locally

 License
 -------

@@ -15,4 +15,4 @@ if app.config["ENV"] != "production":
     app.register_blueprint(proxy_blueprint)

 if __name__ == "__main__":
-    app.run(host="localhost", port=5000)
+    app.run(host="localhost", port=7004)

poetry.lock

@@ -101,7 +101,7 @@ version = "0.1.0"
 description = "Make HTTP Requests available to SpiffWorkflow Service Tasks"
 category = "main"
 optional = false
-python-versions = "^3.11"
+python-versions = "^3.9"
 develop = false

 [package.dependencies]
@@ -111,7 +111,7 @@ requests = "^2.28.1"
 type = "git"
 url = "https://github.com/sartography/connector-http.git"
 reference = "HEAD"
-resolved_reference = "337671b38f47bd8a3113bc6fa85b987828c4ee66"
+resolved_reference = "8ae0b27d5a3e79562f3d1da01be2dcadef49a195"

 [[package]]
 name = "connector-slack"
@@ -333,7 +333,7 @@ version = "0.1.0"
 description = "A blueprint that can allow (and limit) SpiffWorkflow's Service Tasks access to an organizations API's, such as connections to AWS Services and existing applications."
 category = "main"
 optional = false
-python-versions = "^3.10"
+python-versions = "^3.9"
 develop = false

 [package.dependencies]
@@ -344,7 +344,7 @@ Flask-OAuthlib = "^0.9.6"
 type = "git"
 url = "https://github.com/sartography/spiffworkflow-proxy"
 reference = "HEAD"
-resolved_reference = "cfe9b93665e10390a2e64c492c57bd2613364588"
+resolved_reference = "6cb2bbea923946cb2db3bf571a4ce28c776434b9"

 [[package]]
 name = "urllib3"

.flake8

@@ -2,6 +2,7 @@
 select = B,B9,C,D,DAR,E,F,N,RST,S,W
 ignore = E203,E501,RST201,RST203,RST301,W503,S410,S320
 max-line-length = 120
+extend-ignore = E203
 max-complexity = 30
 docstring-convention = google
 rst-roles = class,const,func,meth,mod,ref
@@ -17,10 +18,10 @@ per-file-ignores =
     # THEN, test_hey.py will NOT be excluding D103

     # asserts are ok in tests
-    tests/*:S101,D100,D101,D102,D103
+    tests/*:S101,D100,D101,D102,D103,D107

     # prefer naming functions descriptively rather than forcing comments
-    src/*:D100,D101,D102,D103
+    src/*:D100,D101,D102,D103,D107

    bin/keycloak_test_server.py:B950,D
    conftest.py:S105

@@ -1,48 +0,0 @@
-#!/usr/bin/env bash
-
-function error_handler() {
-  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
-  exit "$2"
-}
-trap 'error_handler ${LINENO} $?' ERR
-set -o errtrace -o errexit -o nounset -o pipefail
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_ENV=staging
-fi
-
-if [[ -z "${FLASK_SESSION_SECRET_KEY:-}" ]]; then
-  export FLASK_SESSION_SECRET_KEY=staging_super_secret_key_dont_tell_anyone
-fi
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD=St4g3Th1515
-fi
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_DATABASE_NAME=spiffworkflow_backend_staging
-fi
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY=always
-fi
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run
-fi
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND='http://167.172.242.138:7001'
-fi
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_URL:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_URL='http://167.172.242.138:7000'
-fi
-
-if [[ -z "${SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL='http://167.172.242.138:7002'
-fi
-
-git pull
-./bin/build_and_run_with_docker_compose
-./bin/wait_for_server_to_be_up

@@ -20,7 +20,8 @@ fi
 SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
 export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR

-export FLASK_SESSION_SECRET_KEY=super_secret_key
+# export FLASK_SESSION_SECRET_KEY="super_secret_key"
+export FLASK_SESSION_SECRET_KEY="e7711a3ba96c46c68e084a86952de16f"
 export SPIFFWORKFLOW_BACKEND_APPLICATION_ROOT="/"

 if [[ -n "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" ]]; then

@@ -37,7 +37,7 @@ from spiffworkflow_backend import create_app  # noqa: E402
 def app() -> Flask:
     """App."""
     os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "unit_testing"
-    os.environ["FLASK_SESSION_SECRET_KEY"] = "super_secret_key"
+    os.environ["FLASK_SESSION_SECRET_KEY"] = "e7711a3ba96c46c68e084a86952de16f"
     app = create_app()

     return app

@@ -51,7 +51,7 @@ services:
       context: .
     environment:
       - FLASK_DEBUG=0
-      - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
+      - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-e7711a3ba96c46c68e084a86952de16f}
       - SPIFFWORKFLOW_BACKEND_APPLICATION_ROOT=/
       - SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
       - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}

@@ -86,6 +86,7 @@ while read -r input_line; do
     custom_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
     first_line_processed="true"
   elif [[ -n "$input_line" ]]; then
+    echo "Importing: $input_line"
     user_email=$(awk -F ',' '{print $1}' <<<"$input_line")
     username=$(awk -F '@' '{print $1}' <<<"$user_email")
     user_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")

@@ -396,7 +396,7 @@
   "otpPolicyLookAheadWindow" : 1,
   "otpPolicyPeriod" : 30,
   "otpPolicyCodeReusable" : false,
-  "otpSupportedApplications" : [ "totpAppFreeOTPName", "totpAppGoogleName" ],
+  "otpSupportedApplications" : [ "totpAppGoogleName", "totpAppFreeOTPName" ],
   "webAuthnPolicyRpEntityName" : "keycloak",
   "webAuthnPolicySignatureAlgorithms" : [ "ES256" ],
   "webAuthnPolicyRpId" : "",
@@ -807,6 +807,190 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "3730e6ec-4b0c-4fbe-a34b-2cd43d8c9854",
+    "createdTimestamp" : 1678461819329,
+    "username" : "core10.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core10.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "225" ]
+    },
+    "credentials" : [ {
+      "id" : "223cbe3b-d432-4707-b826-6220caa14bd7",
+      "type" : "password",
+      "createdDate" : 1678461819366,
+      "secretData" : "{\"value\":\"Mp81SeHhDQa2U/i/S2CfPnKvjwRDJCKZMgCQX3BkZWE/a6791XjXmwB8DE5qS8tiST68BQoQRuc1VCiNKL3zaQ==\",\"salt\":\"Jb0BB2tIQ+HUJQIFr82g9w==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "88e7ca9e-1825-4d4a-9f60-29368023c67b",
+    "createdTimestamp" : 1678461819411,
+    "username" : "core11.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core11.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "226" ]
+    },
+    "credentials" : [ {
+      "id" : "46dc7656-b70b-4d86-80fc-aa08d807be2b",
+      "type" : "password",
+      "createdDate" : 1678461819447,
+      "secretData" : "{\"value\":\"hgBEI05fhPMVx47O9KmnrTvPomKJXK0IjEHZ30zM3fu6maT2fOHGh4+ti6MVhKqQeXKZR4wtC3i1RoqLNOsjpQ==\",\"salt\":\"BWxZnmTfzggGqzVKkFY+vQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "6504eeda-be24-488b-ace4-1d50a7a354bc",
+    "createdTimestamp" : 1678461819494,
+    "username" : "core12.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core12.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "227" ]
+    },
+    "credentials" : [ {
+      "id" : "bde05120-10b5-4796-b559-9238847d2604",
+      "type" : "password",
+      "createdDate" : 1678461819527,
+      "secretData" : "{\"value\":\"njdHu9w1jeSvaNbdwVf0X+3TZaHmZVwUc+/TOAtv05eNGBIW9Vt1+500AsLReHS8lb/I3fglr5I9ZskYHUc0fA==\",\"salt\":\"lH6xJHf1jQGX1j4bYH6GXA==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "ed249cd3-c66e-46e0-9184-1e6468b57afa",
+    "createdTimestamp" : 1678461819557,
+    "username" : "core13.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core13.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "228" ]
+    },
+    "credentials" : [ {
+      "id" : "81b65ee8-6fcd-4cd6-8886-aa44feefa55f",
+      "type" : "password",
+      "createdDate" : 1678461819592,
+      "secretData" : "{\"value\":\"ywBsPI0pdoCOjNWinYNZQBBzL3NRp2u2jv3aXBGxneTo9v8XaVweGL52HIyTikdfmX46TEMIH6LQopaYFcwhng==\",\"salt\":\"GTw17rcE4UvB/Dx4UUkAog==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "1b7b3aa4-b0fe-46c7-a9a1-3fb3c99c7576",
+    "createdTimestamp" : 1678461819624,
+    "username" : "core14.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core14.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "229" ]
+    },
+    "credentials" : [ {
+      "id" : "0c24ffe5-cb97-4b0d-a0d1-920de540742e",
+      "type" : "password",
+      "createdDate" : 1678461819658,
+      "secretData" : "{\"value\":\"3RXjoEUpqxH6RM0sZUf393H9nzyVADId8IWNru9fWgdQg6tHaZezRBZ/lRRERvvdmLiupQ3cMsL/HHvPRQA6tA==\",\"salt\":\"zkaBJY+Dvg5Az74MACBBUg==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "8e2b39a8-a744-4345-928f-da1a36f15f46",
+    "createdTimestamp" : 1678461819686,
+    "username" : "core15.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core15.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "230" ]
+    },
+    "credentials" : [ {
+      "id" : "14a91e80-cec9-44cf-aa85-28e0043f660d",
+      "type" : "password",
+      "createdDate" : 1678461819720,
+      "secretData" : "{\"value\":\"JnP9MpLDM92LuzJnEVUy0vzm9LoSttezepYu4ANfJlmcS6cUvnnh1yDKm43I2YzM4+mXRdxJyoLZTk/ZpmshSQ==\",\"salt\":\"5CKz6mrqr4IaUeEuu/hR9Q==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "ffe3e131-9479-49d2-8125-83dc86a16478",
+    "createdTimestamp" : 1678461819751,
+    "username" : "core16.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core16.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "231" ]
+    },
+    "credentials" : [ {
+      "id" : "cf010c6c-035e-4a2f-ab74-5617fd23c808",
+      "type" : "password",
+      "createdDate" : 1678461819786,
+      "secretData" : "{\"value\":\"WeZ+YxLVtjRhlLZnb6j3AfecmQEsvTm3iM8ZqQthgq9c4BuZ23qare3PEVlRCA1+Oj5sAOOS1hs9iab6ia49wQ==\",\"salt\":\"uai22Okju4dg7GfO7p3C1Q==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "94bcef08-2af1-4805-864d-cbabcd851d67",
+    "createdTimestamp" : 1678461819815,
+    "username" : "core17.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core17.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "232" ]
+    },
+    "credentials" : [ {
+      "id" : "c7a58ff0-7c56-464b-9009-b6e845075087",
+      "type" : "password",
+      "createdDate" : 1678461819850,
+      "secretData" : "{\"value\":\"R53+DKM2eyUXDYJDjW9BtwdY+x0/CUhgUDDYjip7BvGAepzRqPvZVbCLqJjFf6YctO4Va7F65n4evd40GbO7fQ==\",\"salt\":\"U/ia7H+I4yeD3bpP1vnH6Q==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "3b81b45e-759b-4d7a-aa90-adf7b447208c",
     "createdTimestamp" : 1676302140358,
@@ -922,6 +1106,75 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "89d57569-1a90-412a-ba01-aa8ff19ed171",
+    "createdTimestamp" : 1678461819085,
+    "username" : "core7.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core7.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "222" ]
+    },
+    "credentials" : [ {
+      "id" : "cfeb64ec-a38a-4f95-b0cd-28b5501524d8",
+      "type" : "password",
+      "createdDate" : 1678461819121,
+      "secretData" : "{\"value\":\"w4WKqWXTlin6MPQi0mO+Bvktb2zuMdIylqNNxYgBCnd5vwzq2widp7G9f3wz8Iy0wY8K2rqBjdSmmbZ7fJ8//Q==\",\"salt\":\"SRuRkx3572cDGoWhqAQGLQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "81efd609-b6ae-42ec-800e-d6fcca2f8282",
+    "createdTimestamp" : 1678461819150,
+    "username" : "core8.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core8.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "223" ]
+    },
+    "credentials" : [ {
+      "id" : "0b476f6f-7aa4-4f75-bf5c-ac47521f3900",
+      "type" : "password",
+      "createdDate" : 1678461819185,
+      "secretData" : "{\"value\":\"ALWI40OEZUhMJ1CQTV9wSrwQUWfYNiYbN2JTmCUfbLUcUbY+rTrKOfAn9Mc/bCEFJomiTb9u/eqnkKX/lCGgew==\",\"salt\":\"wW2T8PkpCnnPfMNwpPVUVQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "a1233c9f-e59a-48dc-aaa7-1513f1aa5654",
+    "createdTimestamp" : 1678461819225,
+    "username" : "core9.contributor",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "core9.contributor@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "224" ]
+    },
+    "credentials" : [ {
+      "id" : "907b9d46-b8a3-4a14-ab89-b07d2c4d431a",
+      "type" : "password",
+      "createdDate" : 1678461819266,
+      "secretData" : "{\"value\":\"v9aFLHzLyiwWuAxNeVtRjtXzRtug6KU2f19SbS8dBdPC0mlHORoLYXy6VoAMdcTv8bfrW6e9iCgqWnXdXU6yMg==\",\"salt\":\"giVxblJWbFNNPiZZKxWYxg==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "7b9767ac-24dc-43b0-838f-29e16b4fd14e",
     "createdTimestamp" : 1675718483773,
@@ -1522,6 +1775,29 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "9a4d176c-e61e-4392-8c50-a04988606aa6",
+    "createdTimestamp" : 1678461818383,
+    "username" : "infra6.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "infra6.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "212" ]
+    },
+    "credentials" : [ {
+      "id" : "c381e58c-3e06-4e10-bd23-46f258c1c91f",
+      "type" : "password",
+      "createdDate" : 1678461818420,
+      "secretData" : "{\"value\":\"m17+awcU3Ezhfi/gBK0xyxvnGKHads95lhn7uxvEXaPCJF0ioN8C27tH1RwU1w9ptdWjWKWAM9dcimIegy7M7g==\",\"salt\":\"0kCljoos7qzCnVdv+3IMjQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "b8d0d90e-9a7e-446c-9984-082cb315af8f",
     "createdTimestamp" : 1675718484095,
@@ -1869,6 +2145,75 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "a368625b-b905-4e0d-83f6-dfe707b6320a",
+    "createdTimestamp" : 1678461818455,
+    "username" : "legal6.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "legal6.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "213" ]
+    },
+    "credentials" : [ {
+      "id" : "53a21d32-1da5-45f1-a7d9-e45304b213d1",
+      "type" : "password",
+      "createdDate" : 1678461818490,
+      "secretData" : "{\"value\":\"9zEoc1uV0QXsMvAS8lA1xdh4bOqcPdSAItg7zBFr5i+In/xOBtpRM0277nMgDNLtar4s+HRhytWgJ7OidVmjsw==\",\"salt\":\"ahEvQYvH0bHbT/uHz1I9QA==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "e02e085f-eb50-4fe3-844c-24e41479ab47",
+    "createdTimestamp" : 1678461818523,
+    "username" : "legal7.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "legal7.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "214" ]
+    },
+    "credentials" : [ {
+      "id" : "f5377236-8b0b-4be4-8dab-afb2c4a6470f",
+      "type" : "password",
+      "createdDate" : 1678461818557,
+      "secretData" : "{\"value\":\"dyQhBsrNeYHkbJudEjiay3duLFO9B66l0d+2L26S+/HMGuKfuI4NT+gju1MfQPVJhyC01FH7EmDGGS8I45i2jw==\",\"salt\":\"kU4NM5QOWvGSX+kVyvwSoA==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "4de624bd-485f-49d5-817c-ba66c31be7a9",
+    "createdTimestamp" : 1678461818589,
+    "username" : "legal8.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "legal8.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "215" ]
+    },
+    "credentials" : [ {
+      "id" : "5d71a02b-2f4b-484d-9125-a4454a17a800",
+      "type" : "password",
+      "createdDate" : 1678461818632,
+      "secretData" : "{\"value\":\"UH+hrjz9F+X0vQlbgzaFiZBA5uol9Lnjs1/5VpBnbWuISF6MAlxj2fmbnZbw4ILVSllaQvVSFaD4YUxbnRhUmw==\",\"salt\":\"MuAF2Rl7IOxOgZ7Xbqs3RQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "8a03f00f-310d-4bae-b918-f6f128f98095",
     "createdTimestamp" : 1677187934419,
@@ -1958,6 +2303,29 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "97843876-e1b6-469a-bab4-f9bce4aa5936",
+    "createdTimestamp" : 1678461819014,
+    "username" : "mobile.project-lead",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "mobile.project-lead@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "221" ]
+    },
+    "credentials" : [ {
+      "id" : "96c00769-4348-4ad3-82c5-f34124602c17",
+      "type" : "password",
+      "createdDate" : 1678461819049,
+      "secretData" : "{\"value\":\"E7nVydRqQ+TZs54VmJcT4AjjtT1la7PmQbOnylqTPkkcOdLRmZbNTw/K429lOhqUHX7y1prC3OjGdY1VI8bjsg==\",\"salt\":\"D61yv2zS3Bi8epVKjRpWQw==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "9d23748e-23a7-4c48-956c-64da75871277",
     "createdTimestamp" : 1675718484779,
@@ -2001,6 +2369,29 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "e8e67210-5088-46bc-97db-09dbcaf9de97",
+    "createdTimestamp" : 1678461818939,
+    "username" : "nomos.project-lead",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "nomos.project-lead@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "220" ]
+    },
+    "credentials" : [ {
+      "id" : "8139f9b8-bad9-41d2-b3c6-589a2c11bf45",
+      "type" : "password",
+      "createdDate" : 1678461818975,
+      "secretData" : "{\"value\":\"6g5XIaFghMzx8CFYO6VJLGpUqBRiAEwFklZSI+uzJ5vrMsDvrcGjDuWtY+lmRO4lKqy30lBvqhMFvPT6pCxF3g==\",\"salt\":\"dT+XvwD+hxUwRAJCZFFYiA==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "df72b3d2-07fd-4cb0-a447-a1c433db49d5",
     "createdTimestamp" : 1676302143785,
@@ -2185,6 +2576,98 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "07f7a010-7542-4c2f-adf8-04b39433181d",
+    "createdTimestamp" : 1678461818663,
+    "username" : "peopleops.partner6.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "peopleops.partner6.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "216" ]
+    },
+    "credentials" : [ {
+      "id" : "867e9236-3a15-4198-b085-d36a7fa859e9",
+      "type" : "password",
+      "createdDate" : 1678461818713,
+      "secretData" : "{\"value\":\"kmQkAD459XkLCGaWWTr1rrwZYQ2gQ4k2xTroJZAyHmWvBBnKg+a74cRaW2Y3dnzcGTlcprtuMvwYVfq7HIOkmg==\",\"salt\":\"uKORqhpJJnceOf/q56BiSA==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "5d41b5b7-bc3c-42fe-b20b-56a7c6cd3801",
+    "createdTimestamp" : 1678461818743,
+    "username" : "peopleops.partner7.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "peopleops.partner7.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "217" ]
+    },
+    "credentials" : [ {
+      "id" : "745d419f-c6de-4504-9c8e-c3f7b1ac747e",
+      "type" : "password",
+      "createdDate" : 1678461818778,
+      "secretData" : "{\"value\":\"myjshlqPW/3DpwC5X4vsAaqcsisdKwqr+CQXP18mt3AQMzqipHJaVAEAJzkZS4j42VB/XAvh0olMxb8Vapyw3g==\",\"salt\":\"jNpX6DyT5Tt/5dPXYiQfpQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "73523c93-6104-4494-b1c8-2af6087bcdd9",
+    "createdTimestamp" : 1678461818810,
+    "username" : "peopleops.partner8.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "peopleops.partner8.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "218" ]
+    },
+    "credentials" : [ {
+      "id" : "e839763b-aba2-4b4f-b715-b2c061b7430f",
+      "type" : "password",
+      "createdDate" : 1678461818843,
+      "secretData" : "{\"value\":\"M0KfNRU/4qt1WL/cGiSm6sKfN9PTK+6JiV96Y55Zg5CYaXH0ihTyGo62wS4T4YuyMm6/yTKz7+w3gdU4Zg/3Uw==\",\"salt\":\"sd/JEXtWTW4PetXzEBCNQA==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "cdff7ae3-72eb-45b6-9424-6f56df9c3b1c",
+    "createdTimestamp" : 1678461818873,
+    "username" : "peopleops.partner9.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "peopleops.partner9.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "219" ]
+    },
+    "credentials" : [ {
+      "id" : "5ff8e042-a72e-4b46-9efa-e1910cd09d13",
+      "type" : "password",
+      "createdDate" : 1678461818908,
+      "secretData" : "{\"value\":\"q/hdvLKerMbnpe6yjC3VxDqCFi0ne7rD5A1K39EM+XgD6bFI62qKW5JIBB5BaGz/GrWYw7ipwMBaOvLBOubSkg==\",\"salt\":\"vfnCbi47kaYpILxbL0b3Tg==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "dbf941e7-0b45-4bc6-ae9e-d7153d32ce47",
     "createdTimestamp" : 1676302143401,
@@ -2691,6 +3174,29 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "c684e919-6ae0-4031-a160-8e90338567b3",
+    "createdTimestamp" : 1678461818310,
+    "username" : "security6.sme",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "security6.sme@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "211" ]
+    },
+    "credentials" : [ {
+      "id" : "aff2f083-f6aa-4f93-899f-aaa3119a9739",
+      "type" : "password",
+      "createdDate" : 1678461818346,
+      "secretData" : "{\"value\":\"7XGMuiylxKmwDwJZtiPNLllERwN8KLoILLE/BjjXOkqN3c+C+KYgNxPhrDt8dG9PDYOq/59vh/4E2y82GLaoEw==\",\"salt\":\"ufzmAcoMLoi0jtRHwGDadg==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "b768e3ef-f905-4493-976c-bc3408c04bec",
     "createdTimestamp" : 1675447832524,
@@ -2794,6 +3300,52 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "8205682c-71fd-43e6-9e90-bac3d28f850c",
+    "createdTimestamp" : 1678461818109,
+    "username" : "vac.program-lead",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "vac.program-lead@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "163" ]
+    },
+    "credentials" : [ {
+      "id" : "ea75f124-84aa-4058-819a-1175ffe1451b",
+      "type" : "password",
+      "createdDate" : 1678461818170,
+      "secretData" : "{\"value\":\"7Nuw33yzMfwWmMrPC1ytP3L5Y2HMWXANyKtP1+kjJ1HJeDSGVzYLvoKq3rIIYLRd0MQ/NFwJUyz08GAyqFfDMw==\",\"salt\":\"NSJ7cCO2SsQf/oLWJokRDg==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "cb99a5c4-2c28-4b19-b8c7-635b757fc817",
+    "createdTimestamp" : 1678461818231,
+    "username" : "waku.research.project-lead",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "waku.research.project-lead@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "164" ]
+    },
+    "credentials" : [ {
+      "id" : "ed5fc4a1-d574-4940-b5e4-3a1ad9d122ba",
+      "type" : "password",
+      "createdDate" : 1678461818268,
+      "secretData" : "{\"value\":\"K7MRRw2gO4bXHJH8U4cZU2rcVQT/hxw7kMHqN1uDae9FVqFEKh014qiwePOHr5K1xjUw8uU5e/d3HCcwhuRUQw==\",\"salt\":\"R4FdsDK6NvelgQ8gH7Me0g==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   } ],
   "scopeMappings" : [ {
     "clientScope" : "offline_access",
@@ -4026,7 +4578,7 @@
     "subType" : "authenticated",
     "subComponents" : { },
     "config" : {
-      "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-user-property-mapper" ]
+      "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-address-mapper" ]
     }
   }, {
     "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@@ -4044,7 +4596,7 @@
     "subType" : "anonymous",
     "subComponents" : { },
     "config" : {
-      "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper" ]
+      "allowed-protocol-mapper-types" : [ "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper" ]
     }
   }, {
     "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@@ -4134,7 +4686,7 @@
   "internationalizationEnabled" : false,
   "supportedLocales" : [ ],
   "authenticationFlows" : [ {
-    "id" : "84c5d297-0fe5-423d-a563-506a227fd48e",
+    "id" : "04b09640-f53c-4c1b-b2b1-8cac25afc2bb",
     "alias" : "Account verification options",
     "description" : "Method with which to verity the existing account",
     "providerId" : "basic-flow",
@@ -4156,7 +4708,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "f816c0fb-937d-4874-823c-982e8a5b895d",
+    "id" : "e7c246f4-71c3-4a48-9037-72438bdcfcbb",
     "alias" : "Authentication Options",
     "description" : "Authentication options.",
     "providerId" : "basic-flow",
@@ -4185,7 +4737,7 @@
       "userSetupAllowed" : false
     } ]
  }, {
-    "id" : "f9ae89b4-a726-4672-bcde-5c7d5bcae312",
+    "id" : "6e9d415e-98f7-4459-b10b-45b08302c681",
     "alias" : "Browser - Conditional OTP",
     "description" : "Flow to determine if the OTP is required for the authentication",
     "providerId" : "basic-flow",
@@ -4207,7 +4759,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "f56fc357-0772-4bd3-9c15-7a0265c13ae2",
+    "id" : "c86b0fad-f7dd-4c58-974e-25eb83c1dacf",
     "alias" : "Direct Grant - Conditional OTP",
     "description" : "Flow to determine if the OTP is required for the authentication",
     "providerId" : "basic-flow",
@@ -4229,7 +4781,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "e34eb5aa-0f88-4f92-8f1e-d64919a7313e",
+    "id" : "cb7f4c87-a8fa-445a-a8d4-53869cdfed12",
     "alias" : "First broker login - Conditional OTP",
     "description" : "Flow to determine if the OTP is required for the authentication",
     "providerId" : "basic-flow",
@@ -4251,7 +4803,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "5c3ff777-1fd3-4e13-a500-87fd064d4f56",
+    "id" : "8fa87954-bc65-4f1e-bc55-f5bb49f59fbb",
     "alias" : "Handle Existing Account",
     "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
     "providerId" : "basic-flow",
@@ -4273,7 +4825,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "a1684709-c4f0-493f-8eef-08cc4331b068",
+    "id" : "e617d826-c654-4c35-96ad-8381bd1e2298",
     "alias" : "Reset - Conditional OTP",
     "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
     "providerId" : "basic-flow",
@@ -4295,7 +4847,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "ddb4247d-e70f-4b3e-8c58-2127539df878",
+    "id" : "2e4a46ae-2813-4b71-9386-c08b2f063fa6",
     "alias" : "User creation or linking",
     "description" : "Flow for the existing/non-existing user alternatives",
     "providerId" : "basic-flow",
@@ -4318,7 +4870,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "c1feb17c-2704-4854-abde-f2d97e1441eb",
+    "id" : "8fa69de0-13cf-4252-899b-c59a30ebd132",
     "alias" : "Verify Existing Account by Re-authentication",
     "description" : "Reauthentication of existing account",
     "providerId" : "basic-flow",
@@ -4340,7 +4892,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "5c7e4833-d2ce-4973-9da5-370c77efda7d",
+    "id" : "204d20f6-d9a7-49ff-a7a3-45386fb884f4",
     "alias" : "browser",
     "description" : "browser based authentication",
     "providerId" : "basic-flow",
@@ -4376,7 +4928,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "5b0ab6fc-da4c-4b1f-8036-3c6848a7b648",
+    "id" : "3c0c2987-65db-4920-ae44-34aba220c3fb",
     "alias" : "clients",
     "description" : "Base authentication for clients",
     "providerId" : "client-flow",
@@ -4412,7 +4964,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "1c2c738c-2f40-4dcd-b82d-17b8d32b34dc",
+    "id" : "68a92113-be75-4e63-a322-8076d6c67650",
     "alias" : "direct grant",
     "description" : "OpenID Connect Resource Owner Grant",
     "providerId" : "basic-flow",
@@ -4441,7 +4993,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "24544f4c-2e68-4f0e-9d7d-791e63b2c705",
+    "id" : "a630d78f-4fe1-4350-a19d-d091d1af514d",
     "alias" : "docker auth",
     "description" : "Used by Docker clients to authenticate against the IDP",
     "providerId" : "basic-flow",
@@ -4456,7 +5008,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "6104dbf5-a433-454b-abba-c46665c0cfe3",
+    "id" : "f73b4437-8e82-4788-be69-e437b09b500c",
     "alias" : "first broker login",
     "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
     "providerId" : "basic-flow",
@@ -4479,7 +5031,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "ef60185e-3c7b-425e-b813-91b1a59ccddb",
+    "id" : "b7c8cc6d-bc1f-446e-b263-72214b2f5c56",
     "alias" : "forms",
     "description" : "Username, password, otp and other auth forms.",
     "providerId" : "basic-flow",
@@ -4501,7 +5053,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "f5292250-2f32-4021-a4ed-fb8316c22331",
+    "id" : "a3bdf79f-8c7d-4bff-807d-76fa61093446",
     "alias" : "http challenge",
     "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
     "providerId" : "basic-flow",
@@ -4523,7 +5075,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "60919bb8-7857-4217-9a3e-7f2c44396dd7",
+    "id" : "ada41b4e-5a12-496d-aa1e-d31cf8c08226",
     "alias" : "registration",
     "description" : "registration flow",
     "providerId" : "basic-flow",
@@ -4539,7 +5091,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "e16f693e-70e8-446f-bab0-c5ee7ce14506",
+    "id" : "1c858bcd-2031-4056-bbf0-1fbaecdd7068",
     "alias" : "registration form",
     "description" : "registration form",
     "providerId" : "form-flow",
@@ -4575,7 +5127,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "61441726-52c9-4720-b801-0fc4e7a89912",
+    "id" : "ff91e251-d85e-450b-bff7-d45be26777d5",
     "alias" : "reset credentials",
     "description" : "Reset credentials for a user if they forgot their password or something",
     "providerId" : "basic-flow",
@@ -4611,7 +5163,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "87a6ced7-419e-419c-ba66-3aaad4a4970a",
+    "id" : "7b0680a2-99b9-454c-b145-f286e9d60c58",
     "alias" : "saml ecp",
     "description" : "SAML ECP Profile Authentication Flow",
     "providerId" : "basic-flow",
@@ -4627,13 +5179,13 @@
     } ]
   } ],
   "authenticatorConfig" : [ {
-    "id" : "0e0bacbb-9901-4f91-9eec-ef3b8816919b",
+    "id" : "aa1e4f55-3e7f-445a-a432-7a972776d719",
     "alias" : "create unique user config",
     "config" : {
       "require.password.update.after.registration" : "false"
     }
   }, {
-    "id" : "c4fecfc8-120e-44e7-8576-c2aafd9d433f",
+    "id" : "fd69765e-309b-4c5d-bdd5-51343427cd27",
     "alias" : "review profile config",
     "config" : {
       "update.profile.on.first.login" : "missing"

@@ -0,0 +1,2 @@
+email,spiffworkflow-employeeid
+admin@spiffworkflow.org

@@ -13,11 +13,22 @@ codex5.sme@status.im,190
 core-a1.contributor@status.im,202
 core-a2.contributor@status.im,203
 core1.contributor@status.im,155
+core10.contributor@status.im,225
+core11.contributor@status.im,226
+core12.contributor@status.im,227
+core13.contributor@status.im,228
+core14.contributor@status.im,229
+core15.contributor@status.im,230
+core16.contributor@status.im,231
+core17.contributor@status.im,232
 core2.contributor@status.im,156
 core3.contributor@status.im,157
 core4.contributor@status.im,158
 core5.contributor@status.im,159
 core6.contributor@status.im,199
+core7.contributor@status.im,222
+core8.contributor@status.im,223
+core9.contributor@status.im,224
 core@status.im,113
 dao.project.lead@status.im
 desktop-a1.sme@status.im,210
@@ -31,7 +42,6 @@ desktop3.sme@status.im,196
 desktop4.sme@status.im,197
 desktop5.sme@status.im,198
 fin@status.im,118
 finance.lead@status.im,128
 finance_user1@status.im
 fluffy.project-lead@status.im,162
 harmeet@status.im,109
@@ -43,6 +53,7 @@ infra2.sme@status.im,132
 infra3.sme@status.im,167
 infra4.sme@status.im,175
 infra5.sme@status.im,176
+infra6.sme@status.im,212
 jakub@status.im
 jarrad@status.im
 lead@status.im,114
@@ -54,9 +65,14 @@ legal2.sme@status.im,165
 legal3.sme@status.im,166
 legal4.sme@status.im,177
 legal5.sme@status.im,178
+legal6.sme@status.im,213
+legal7.sme@status.im,214
+legal8.sme@status.im,215
 logos.program-lead@status.im,160
 manuchehr@status.im,110
+mobile.project-lead@status.im,221
 nimbus.program-lead@status.im,161
+nomos.project-lead@status.im,220
 peopleops.partner-a1.sme@status.im,208
 peopleops.partner.sme@status.im,148
 peopleops.partner1.sme@status.im,149
@@ -64,6 +80,10 @@ peopleops.partner2.sme@status.im,173
 peopleops.partner3.sme@status.im,174
 peopleops.partner4.sme@status.im,181
 peopleops.partner5.sme@status.im,182
+peopleops.partner6.sme@status.im,216
+peopleops.partner7.sme@status.im,217
+peopleops.partner8.sme@status.im,218
+peopleops.partner9.sme@status.im,219
 peopleops.partner@status.im,150
 peopleops.project-lead@status.im,147
 peopleops.talent.sme@status.im,143
@@ -87,4 +107,7 @@ security2.sme@status.im,168
 security3.sme@status.im,169
 security4.sme@status.im,179
 security5.sme@status.im,180
+security6.sme@status.im,211
 services.lead@status.im,122
+vac.program-lead@status.im,163
+waku.research.project-lead@status.im,164

@@ -0,0 +1,58 @@
+"""empty message
+
+Revision ID: e2972eaf8469
+Revises: 389800c352ee
+Create Date: 2023-03-13 22:00:21.579493
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = 'e2972eaf8469'
+down_revision = '389800c352ee'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('process_instance_queue',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('process_instance_id', sa.Integer(), nullable=False),
+    sa.Column('run_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('priority', sa.Integer(), nullable=True),
+    sa.Column('locked_by', sa.String(length=80), nullable=True),
+    sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('status', sa.String(length=50), nullable=True),
+    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False)
+    op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False)
+    op.create_index(op.f('ix_process_instance_queue_process_instance_id'), 'process_instance_queue', ['process_instance_id'], unique=True)
+    op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False)
+    op.alter_column('message_instance', 'user_id',
+               existing_type=mysql.INTEGER(),
+               nullable=True)
+    op.drop_column('process_instance', 'locked_by')
+    op.drop_column('process_instance', 'locked_at_in_seconds')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('process_instance', sa.Column('locked_at_in_seconds', mysql.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('process_instance', sa.Column('locked_by', mysql.VARCHAR(length=80), nullable=True))
+    op.alter_column('message_instance', 'user_id',
+               existing_type=mysql.INTEGER(),
+               nullable=False)
+    op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue')
+    op.drop_index(op.f('ix_process_instance_queue_process_instance_id'), table_name='process_instance_queue')
+    op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue')
+    op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue')
+    op.drop_table('process_instance_queue')
+    # ### end Alembic commands ###

@@ -40,7 +40,7 @@ def setup_database(session: Session) -> None:
         os.getcwd(), "instance", "testing"
     )
     flask_env_key = "FLASK_SESSION_SECRET_KEY"
-    session.env[flask_env_key] = "super_secret_key"
+    session.env[flask_env_key] = "e7711a3ba96c46c68e084a86952de16f"
     session.env["FLASK_APP"] = "src/spiffworkflow_backend"
     session.env["SPIFFWORKFLOW_BACKEND_ENV"] = "unit_testing"
     session.run("flask", "db", "upgrade")
@ -254,6 +254,17 @@ category = "main"
|
|||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "1.15.1"
|
||||
description = "Foreign Function Interface for Python calling C code."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[package.dependencies]
|
||||
pycparser = "*"
|
||||
|
||||
[[package]]
|
||||
name = "cfgv"
|
||||
version = "3.3.1"
|
||||
|
@ -403,6 +414,27 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
|
|||
[package.extras]
|
||||
toml = ["tomli"]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "39.0.2"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[package.dependencies]
|
||||
cffi = ">=1.12"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
|
||||
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
|
||||
pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"]
|
||||
sdist = ["setuptools-rust (>=0.11.4)"]
|
||||
ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
tox = ["tox"]
|
||||
|
||||
[[package]]
|
||||
name = "darglint"
|
||||
version = "1.8.1"
|
||||
|
@ -1261,6 +1293,14 @@ category = "dev"
|
|||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "2.21"
|
||||
description = "C parser in Python"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
|
||||
[[package]]
|
||||
name = "pycryptodome"
|
||||
version = "3.17"
|
||||
|
@ -2234,7 +2274,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
|
|||
[metadata]
|
||||
lock-version = "1.1"
|
||||
python-versions = ">=3.9,<3.12"
|
||||
content-hash = "eac3b5aa78efea376a9e23e32f9e6853cc22c17a2a21b41e30800cb7c807d017"
|
||||
content-hash = "7ab6d5021406b573edfdca4f9e0f5e62c41a6f6ea09d34154df72454887e3670"
|
||||
|
||||
[metadata.files]
|
||||
alabaster = [
|
||||
|
@@ -2339,6 +2379,72 @@ certifi = [
    {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
    {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
]
cffi = [
    {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
    {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
    {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
    {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
    {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
    {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
    {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
    {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
    {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
    {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
    {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
    {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
    {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
    {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
    {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
    {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
    {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
    {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
    {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
    {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
    {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
    {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
    {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
    {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
    {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
    {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
    {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
    {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
    {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
    {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
    {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
    {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
    {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
    {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
    {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
    {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
    {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
    {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
    {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
    {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
    {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
]
cfgv = [
    {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
    {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
@@ -2435,6 +2541,31 @@ coverage = [
    {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
    {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
]
cryptography = [
    {file = "cryptography-39.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06"},
    {file = "cryptography-39.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7"},
    {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612"},
    {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a"},
    {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97"},
    {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828"},
    {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011"},
    {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536"},
    {file = "cryptography-39.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5"},
    {file = "cryptography-39.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0"},
    {file = "cryptography-39.0.2-cp36-abi3-win32.whl", hash = "sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480"},
    {file = "cryptography-39.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9"},
    {file = "cryptography-39.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac"},
    {file = "cryptography-39.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074"},
    {file = "cryptography-39.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1"},
    {file = "cryptography-39.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3"},
    {file = "cryptography-39.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354"},
    {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915"},
    {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84"},
    {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108"},
    {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3"},
    {file = "cryptography-39.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3"},
    {file = "cryptography-39.0.2.tar.gz", hash = "sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f"},
]
darglint = [
    {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"},
    {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"},
@@ -2970,6 +3101,10 @@ pycodestyle = [
    {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
    {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
]
pycparser = [
    {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
    {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
pycryptodome = [
    {file = "pycryptodome-3.17-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:2c5631204ebcc7ae33d11c43037b2dafe25e2ab9c1de6448eb6502ac69c19a56"},
    {file = "pycryptodome-3.17-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:04779cc588ad8f13c80a060b0b1c9d1c203d051d8a43879117fe6b8aaf1cd3fa"},
@@ -73,6 +73,7 @@ types-dateparser = "^1.1.4.1"
flask-jwt-extended = "^4.4.4"
pylint = "^2.15.10"
flask-simple-crypt = "^0.3.3"
cryptography = "^39.0.2"

[tool.poetry.dev-dependencies]
@@ -1,4 +1,5 @@
"""__init__."""
import base64
import faulthandler
import os
import sys
@@ -67,6 +68,15 @@ def start_scheduler(
) -> None:
    """Start_scheduler."""
    scheduler = scheduler_class()

    # TODO: polling intervals for different jobs
    polling_interval_in_seconds = app.config[
        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"
    ]
    # TODO: add job to release locks to simplify other queries
    # TODO: add job to delete completed entries
    # TODO: add job to run old/low priority instances so they do not get drowned out

    scheduler.add_job(
        BackgroundProcessingService(app).process_message_instances_with_app_context,
        "interval",
@@ -75,7 +85,7 @@ def start_scheduler(
    scheduler.add_job(
        BackgroundProcessingService(app).process_waiting_process_instances,
        "interval",
        seconds=10,
        seconds=polling_interval_in_seconds,
    )
    scheduler.add_job(
        BackgroundProcessingService(app).process_user_input_required_process_instances,
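The hunk above swaps the hard-coded 10-second interval for the configurable polling interval. A minimal standalone sketch of the same pattern, assuming APScheduler's BackgroundScheduler (the scheduler_class argument suggests it, though the diff does not name the library):

    # Hedged sketch: an interval job whose period comes from an env var,
    # mirroring the polling_interval_in_seconds wiring above.
    import os

    from apscheduler.schedulers.background import BackgroundScheduler

    def poll_waiting_instances() -> None:
        print("polling for waiting process instances")  # stand-in for the real job

    interval = int(os.environ.get("POLLING_INTERVAL_IN_SECONDS", "10"))
    scheduler = BackgroundScheduler()
    scheduler.add_job(poll_waiting_instances, "interval", seconds=interval)
    scheduler.start()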
@@ -85,6 +95,28 @@ def start_scheduler(
    scheduler.start()


def should_start_scheduler(app: flask.app.Flask) -> bool:
    if not app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]:
        return False

    # do not start the scheduler twice in flask debug mode but support code reloading
    if (
        app.config["ENV_IDENTIFIER"] != "local_development"
        or os.environ.get("WERKZEUG_RUN_MAIN") != "true"
    ):
        return False

    return True


class NoOpCipher:
    def encrypt(self, value: str) -> bytes:
        return str.encode(value)

    def decrypt(self, value: str) -> str:
        return value


def create_app() -> flask.app.Flask:
    """Create_app."""
    faulthandler.enable()
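NoOpCipher keeps the cipher interface (encrypt returns bytes, decrypt returns the string unchanged) while doing no cryptographic work, so it can serve as a baseline when benchmarking against slower encryption libraries. A usage sketch, with a hypothetical value:

    # No encryption happens; this only exercises the interface shape.
    cipher = NoOpCipher()
    stored = cipher.encrypt("secret-value")        # b"secret-value"
    assert cipher.decrypt("secret-value") == "secret-value"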
@@ -125,19 +157,29 @@ def create_app() -> flask.app.Flask:

    app.json = MyJSONEncoder(app)

    # do not start the scheduler twice in flask debug mode
    if (
        app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]
        and os.environ.get("WERKZEUG_RUN_MAIN") != "true"
    ):
    if should_start_scheduler(app):
        start_scheduler(app)

    configure_sentry(app)

    cipher = SimpleCrypt()
    app.config["FSC_EXPANSION_COUNT"] = 2048
    cipher.init_app(app)
    app.config["CIPHER"] = cipher
    encryption_lib = app.config.get("SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB")
    if encryption_lib == "cryptography":
        from cryptography.fernet import Fernet

        app_secret_key = app.config.get("SECRET_KEY")
        app_secret_key_bytes = app_secret_key.encode()
        base64_key = base64.b64encode(app_secret_key_bytes)
        fernet_cipher = Fernet(base64_key)
        app.config["CIPHER"] = fernet_cipher
    # for comparison against possibly-slow encryption libraries
    elif encryption_lib == "no_op_cipher":
        no_op_cipher = NoOpCipher()
        app.config["CIPHER"] = no_op_cipher
    else:
        simple_crypt_cipher = SimpleCrypt()
        app.config["FSC_EXPANSION_COUNT"] = 2048
        simple_crypt_cipher.init_app(app)
        app.config["CIPHER"] = simple_crypt_cipher

    app.before_request(verify_token)
    app.before_request(AuthorizationService.check_for_permission)
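One caveat with the "cryptography" branch: Fernet only accepts a base64 encoding of exactly 32 bytes, so base64-encoding SECRET_KEY works only when the secret is exactly 32 bytes long. A round-trip sketch under that assumption (the key here is hypothetical, not from the commit):

    import base64

    from cryptography.fernet import Fernet

    secret_key = "0123456789abcdef0123456789abcdef"  # assumed 32-byte secret
    cipher = Fernet(base64.b64encode(secret_key.encode()))
    token = cipher.encrypt(b"sensitive payload")
    assert cipher.decrypt(token) == b"sensitive payload"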
@@ -21,6 +21,12 @@ SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
    environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
    == "true"
)
SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int(
    environ.get(
        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS",
        default="10",
    )
)
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get(
    "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001"
)
@@ -55,6 +61,13 @@ SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS = environ.get(
    "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"
)

# cryptography or simple-crypt
SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB = environ.get(
    # "SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB", default="cryptography"
    "SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB",
    default="no_op_cipher",
)

SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
    environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true"
)
@@ -140,6 +153,14 @@ SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get(
    "SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody"
)

SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND = environ.get(
    "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND", default="greedy"
)

SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get(
    "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="greedy"
)

# this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration
SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get(
    "SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None
@@ -52,6 +52,13 @@ permissions:
    allowed_permissions: [create, read, update, delete]
    uri: /tasks/*

  # Everybody can start all instances
  create-test-instances:
    groups: [ everybody ]
    users: [ ]
    allowed_permissions: [ create ]
    uri: /process-instances/*

  # Everyone can see everything (all groups, and processes are visible)
  read-all-process-groups:
    groups: [ everybody ]
@@ -66,5 +66,8 @@ from spiffworkflow_backend.models.json_data import JsonDataModel  # noqa: F401
from spiffworkflow_backend.models.bpmn_process_definition_relationship import (
    BpmnProcessDefinitionRelationshipModel,
)  # noqa: F401
from spiffworkflow_backend.models.process_instance_queue import (
    ProcessInstanceQueueModel,
)  # noqa: F401

add_listeners()
@@ -55,7 +55,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
    # The correlation keys of the process at the time the message was created.
    correlation_keys: dict = db.Column(db.JSON)
    status: str = db.Column(db.String(20), nullable=False, default="ready")
    user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)  # type: ignore
    user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)  # type: ignore
    user = relationship("UserModel")
    counterpart_id: int = db.Column(
        db.Integer
@@ -105,9 +105,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
    bpmn_version_control_identifier: str = db.Column(db.String(255))
    spiff_step: int = db.Column(db.Integer)

    locked_by: str | None = db.Column(db.String(80))
    locked_at_in_seconds: int | None = db.Column(db.Integer)

    bpmn_xml_file_contents: str | None = None
    process_model_with_diagram_identifier: str | None = None
@@ -0,0 +1,30 @@
"""Process_instance_queue."""
from dataclasses import dataclass
from typing import Union

from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


@dataclass
class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel):
    """ProcessInstanceQueueModel."""

    __tablename__ = "process_instance_queue"

    id: int = db.Column(db.Integer, primary_key=True)
    process_instance_id: int = db.Column(
        ForeignKey(ProcessInstanceModel.id), index=True, unique=True, nullable=False  # type: ignore
    )
    run_at_in_seconds: int = db.Column(db.Integer)
    priority: int = db.Column(db.Integer)
    locked_by: Union[str, None] = db.Column(db.String(80), index=True, nullable=True)
    locked_at_in_seconds: Union[int, None] = db.Column(
        db.Integer, index=True, nullable=True
    )
    status: str = db.Column(db.String(50), index=True)
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)
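Because process_instance_id is unique, each process instance can have at most one queue row. A hedged sketch of what enqueueing might look like given this model; the real ProcessInstanceQueueService implementation is not shown in this diff, and process_instance here is a hypothetical model object:

    import time

    # One queue row per instance, eligible to run immediately.
    queue_entry = ProcessInstanceQueueModel(
        process_instance_id=process_instance.id,
        run_at_in_seconds=round(time.time()),
        status=process_instance.status,
    )
    db.session.add(queue_entry)
    db.session.commit()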
@@ -28,6 +28,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSc
from spiffworkflow_backend.models.process_instance_metadata import (
    ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_queue import (
    ProcessInstanceQueueModel,
)
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
@@ -53,6 +56,9 @@ from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportFilter,
)
@@ -92,6 +98,7 @@ def process_instance_create(
            process_model_identifier, g.user
        )
    )
    ProcessInstanceQueueService.enqueue(process_instance)
    return Response(
        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
        status=201,
@@ -272,6 +279,7 @@ def process_instance_list_for_me(
        with_relation_to_me=True,
        report_columns=report_columns,
        report_filter_by=report_filter_by,
        process_initiator_username=process_initiator_username,
    )
@@ -412,6 +420,9 @@ def process_instance_delete(
    db.session.query(SpiffStepDetailsModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.query(ProcessInstanceQueueModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.delete(process_instance)
    db.session.commit()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@@ -0,0 +1,18 @@
"""Assertion_service."""
import contextlib
from typing import Generator

import sentry_sdk
from flask import current_app


@contextlib.contextmanager
def safe_assertion(condition: bool) -> Generator[bool, None, None]:
    try:
        yield True
    except AssertionError as e:
        if not condition:
            sentry_sdk.capture_exception(e)
            current_app.logger.exception(e)
            if current_app.config["ENV_IDENTIFIER"] == "local_development":
                raise e
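Usage sketch: the assert statement runs inside the with-body, so when it fails the exception propagates into the context manager, which reports it to Sentry and logs it instead of crashing the request; in local_development it re-raises so the failure is loud. The condition below is hypothetical:

    with safe_assertion(task_model is not None):
        assert task_model is not None, "expected a task_model for this human task"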
@@ -3,6 +3,9 @@ import flask

from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_lock_service import (
    ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
@@ -18,11 +21,13 @@ class BackgroundProcessingService:
    def process_waiting_process_instances(self) -> None:
        """Since this runs in a scheduler, we need to specify the app context as well."""
        with self.app.app_context():
            ProcessInstanceLockService.set_thread_local_locking_context("bg:waiting")
            ProcessInstanceService.do_waiting()

    def process_user_input_required_process_instances(self) -> None:
        """Since this runs in a scheduler, we need to specify the app context as well."""
        with self.app.app_context():
            ProcessInstanceLockService.set_thread_local_locking_context("bg:userinput")
            ProcessInstanceService.do_waiting(
                ProcessInstanceStatus.user_input_required.value
            )
@@ -30,4 +35,5 @@ class BackgroundProcessingService:
    def process_message_instances_with_app_context(self) -> None:
        """Since this runs in a scheduler, we need to specify the app context as well."""
        with self.app.app_context():
            ProcessInstanceLockService.set_thread_local_locking_context("bg:messages")
            MessageService.correlate_all_message_instances()
@@ -1,19 +1,13 @@
"""Error_handling_service."""
import json
from typing import Union

from flask import current_app
from flask import g
from flask.wrappers import Response

from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_triggerable_process_model import (
    MessageTriggerableProcessModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.message_service import MessageService
@@ -26,6 +20,8 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelSer
class ErrorHandlingService:
    """ErrorHandlingService."""

    MESSAGE_NAME = "SystemErrorMessage"

    @staticmethod
    def set_instance_status(instance_id: int, status: str) -> None:
        """Set_instance_status."""
@@ -58,106 +54,43 @@ class ErrorHandlingService:
            ProcessInstanceStatus.error.value,
        )

        # Second, call the System Notification Process
        # Note that this isn't the best way to do this.
        # The configs are all in the model.
        # Maybe we can move some of this to the notification process, or dmn tables.
        # Second, send a bpmn message out, but only if an exception notification address is provided
        # This will create a new Send Message with correlation keys on the recipients and the message
        # body.
        if len(process_model.exception_notification_addresses) > 0:
            try:
                self.handle_system_notification(_error, process_model)
                self.handle_system_notification(_error, process_model, _processor)
            except Exception as e:
                # hmm... what to do if a notification method fails. Probably log, at least
                current_app.logger.error(e)

    @staticmethod
    def handle_system_notification(
        error: Union[ApiError, Exception], process_model: ProcessModelInfo
    ) -> Response:
        """Handle_system_notification."""
        recipients = process_model.exception_notification_addresses
        error: Union[ApiError, Exception],
        process_model: ProcessModelInfo,
        _processor: ProcessInstanceProcessor,
    ) -> None:
        """Send a BPMN Message - which may kick off a waiting process."""
        message_text = (
            f"There was an exception running process {process_model.id}.\nOriginal"
            f" Error:\n{error.__repr__()}"
        )
        message_payload = {"message_text": message_text, "recipients": recipients}
        message_name = current_app.config[
            "SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID"
        ]
        message_triggerable_process_model = (
            MessageTriggerableProcessModel.query.filter_by(
                message_name=message_name
            ).first()
        )
        message_payload = {
            "message_text": message_text,
            "recipients": process_model.exception_notification_addresses,
        }
        user_id = None
        if "user" in g:
            user_id = g.user.id
        else:
            user_id = _processor.process_instance_model.process_initiator_id

        # Create the send message
        message_instance = MessageInstanceModel(
            message_type="send",
            name=message_name,
            name=ErrorHandlingService.MESSAGE_NAME,
            payload=message_payload,
            user_id=g.user.id,
            user_id=user_id,
        )
        db.session.add(message_instance)
        db.session.commit()

        process_instance = MessageService.start_process_with_message(
            message_triggerable_process_model, message_instance
        )

        return Response(
            json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
            status=200,
            mimetype="application/json",
        )

    # @staticmethod
    # def handle_sentry_notification(_error: ApiError, _recipients: List) -> None:
    #     """SentryHandler."""
    #     ...
    #
    # @staticmethod
    # def handle_email_notification(
    #     processor: ProcessInstanceProcessor,
    #     error: Union[ApiError, Exception],
    #     recipients: List,
    # ) -> None:
    #     """EmailHandler."""
    #     subject = "Unexpected error in app"
    #     if isinstance(error, ApiError):
    #         content = f"{error.message}"
    #     else:
    #         content = str(error)
    #     content_html = content
    #
    #     EmailService.add_email(
    #         subject,
    #         "sender@company.com",
    #         recipients,
    #         content,
    #         content_html,
    #         cc=None,
    #         bcc=None,
    #         reply_to=None,
    #         attachment_files=None,
    #     )
    #
    # @staticmethod
    # def handle_waku_notification(_error: ApiError, _recipients: List) -> Any:
    #     """WakuHandler."""
    #     # class WakuMessage:
    #     #     """WakuMessage."""
    #     #
    #     #     payload: str
    #     #     contentTopic: str  # Optional
    #     #     version: int  # Optional
    #     #     timestamp: int  # Optional


class FailingService:
    """FailingService."""

    @staticmethod
    def fail_as_service() -> None:
        """It fails."""
        raise ApiError(
            error_code="failing_service", message="This is my failing service"
        )
        MessageService.correlate_send_message(message_instance)
@@ -161,6 +161,9 @@ def setup_logger(app: Flask) -> None:
        spiff_logger_filehandler.setLevel(spiff_log_level)
        spiff_logger_filehandler.setFormatter(log_formatter)

    # these loggers have been deemed too verbose to be useful
    garbage_loggers_to_exclude = ["connexion"]

    # make all loggers act the same
    for name in logging.root.manager.loggerDict:
        # use a regex so spiffworkflow_backend isn't filtered out
@@ -172,10 +175,15 @@ def setup_logger(app: Flask) -> None:
            the_logger.propagate = False
            the_logger.addHandler(spiff_logger_filehandler)
        else:
            if len(the_logger.handlers) < 1:
                # it's very verbose, so only add handlers for the obscure loggers when log level is DEBUG
                if upper_log_level_string == "DEBUG":
                    the_logger.addHandler(logging.StreamHandler(sys.stdout))
            # it's very verbose, so only add handlers for the obscure loggers when log level is DEBUG
            if upper_log_level_string == "DEBUG":
                if len(the_logger.handlers) < 1:
                    exclude_logger_name_from_logging = False
                    for garbage_logger in garbage_loggers_to_exclude:
                        if name.startswith(garbage_logger):
                            exclude_logger_name_from_logging = True
                    if not exclude_logger_name_from_logging:
                        the_logger.addHandler(logging.StreamHandler(sys.stdout))
        for the_handler in the_logger.handlers:
            the_handler.setFormatter(log_formatter)
            the_handler.setLevel(log_level)
@@ -0,0 +1,67 @@
import threading
from typing import Any
from typing import List
from typing import Optional

from flask import current_app

from spiffworkflow_backend.models.process_instance_queue import (
    ProcessInstanceQueueModel,
)


class ProcessInstanceLockService:
    """TODO: comment."""

    @classmethod
    def set_thread_local_locking_context(cls, domain: str) -> None:
        current_app.config["THREAD_LOCAL_DATA"].lock_service_context = {
            "domain": domain,
            "uuid": current_app.config["PROCESS_UUID"],
            "thread_id": threading.get_ident(),
            "locks": {},
        }

    @classmethod
    def get_thread_local_locking_context(cls) -> dict[str, Any]:
        tld = current_app.config["THREAD_LOCAL_DATA"]
        if not hasattr(tld, "lock_service_context"):
            cls.set_thread_local_locking_context("web")
        return tld.lock_service_context  # type: ignore

    @classmethod
    def locked_by(cls) -> str:
        ctx = cls.get_thread_local_locking_context()
        return f"{ctx['domain']}:{ctx['uuid']}:{ctx['thread_id']}"

    @classmethod
    def lock(
        cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel
    ) -> None:
        ctx = cls.get_thread_local_locking_context()
        ctx["locks"][process_instance_id] = queue_entry

    @classmethod
    def lock_many(cls, queue_entries: List[ProcessInstanceQueueModel]) -> List[int]:
        ctx = cls.get_thread_local_locking_context()
        new_locks = {entry.process_instance_id: entry for entry in queue_entries}
        new_lock_ids = list(new_locks.keys())
        ctx["locks"].update(new_locks)
        return new_lock_ids

    @classmethod
    def unlock(cls, process_instance_id: int) -> ProcessInstanceQueueModel:
        ctx = cls.get_thread_local_locking_context()
        return ctx["locks"].pop(process_instance_id)  # type: ignore

    @classmethod
    def try_unlock(
        cls, process_instance_id: int
    ) -> Optional[ProcessInstanceQueueModel]:
        ctx = cls.get_thread_local_locking_context()
        return ctx["locks"].pop(process_instance_id, None)  # type: ignore

    @classmethod
    def has_lock(cls, process_instance_id: int) -> bool:
        ctx = cls.get_thread_local_locking_context()
        return process_instance_id in ctx["locks"]
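A hedged sketch of the intended flow: a worker tags its thread-local context with a domain (matching the "bg:waiting"/"bg:userinput"/"bg:messages" strings above), records the queue entries it claimed, and can later check ownership before doing work. The queue_entries variable is a hypothetical list of claimed ProcessInstanceQueueModel rows:

    ProcessInstanceLockService.set_thread_local_locking_context("bg:waiting")
    claimed_ids = ProcessInstanceLockService.lock_many(queue_entries)
    for process_instance_id in claimed_ids:
        if ProcessInstanceLockService.has_lock(process_instance_id):
            ...  # safe to run engine steps for this instance
        ProcessInstanceLockService.try_unlock(process_instance_id)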
@@ -44,16 +44,13 @@ from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask  # type: ig
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter  # type: ignore
from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ignore
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.task import TaskStateNames
from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
from sqlalchemy import text

from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@@ -91,10 +88,29 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_lock_service import (
    ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskService
from spiffworkflow_backend.services.user_service import UserService
from spiffworkflow_backend.services.workflow_execution_service import (
    execution_strategy_named,
)
from spiffworkflow_backend.services.workflow_execution_service import (
    StepDetailLoggingDelegate,
)
from spiffworkflow_backend.services.workflow_execution_service import (
    TaskModelSavingDelegate,
)
from spiffworkflow_backend.services.workflow_execution_service import (
    WorkflowExecutionService,
)

SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter)
@@ -132,18 +148,14 @@ class MissingProcessInfoError(Exception):
    """MissingProcessInfoError."""


class ProcessInstanceIsAlreadyLockedError(Exception):
    pass


class ProcessInstanceLockedBySomethingElseError(Exception):
    pass


class SpiffStepDetailIsMissingError(Exception):
    pass


class TaskNotFoundError(Exception):
    pass


class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment):  # type: ignore
    def __init__(self, environment_globals: Dict[str, Any]):
        """BoxedTaskDataBasedScriptEngineEnvironment."""
@@ -590,20 +602,45 @@ class ProcessInstanceProcessor:
        ] = task_definition.properties_json

    @classmethod
    def _get_bpmn_process_dict(cls, bpmn_process: BpmnProcessModel) -> dict:
    def _get_bpmn_process_dict(
        cls, bpmn_process: BpmnProcessModel, get_tasks: bool = False
    ) -> dict:
        json_data = JsonDataModel.query.filter_by(
            hash=bpmn_process.json_data_hash
        ).first()
        bpmn_process_dict = {"data": json_data.data, "tasks": {}}
        bpmn_process_dict.update(bpmn_process.properties_json)
        tasks = TaskModel.query.filter_by(bpmn_process_id=bpmn_process.id).all()
        for task in tasks:
            json_data = JsonDataModel.query.filter_by(hash=task.json_data_hash).first()
            bpmn_process_dict["tasks"][task.guid] = task.properties_json
            bpmn_process_dict["tasks"][task.guid]["data"] = json_data.data

        if get_tasks:
            tasks = TaskModel.query.filter_by(bpmn_process_id=bpmn_process.id).all()
            cls._get_tasks_dict(tasks, bpmn_process_dict)
        return bpmn_process_dict

    @classmethod
    def _get_tasks_dict(
        cls,
        tasks: list[TaskModel],
        spiff_bpmn_process_dict: dict,
        bpmn_subprocess_id_to_guid_mappings: Optional[dict] = None,
    ) -> None:
        json_data_hashes = set()
        for task in tasks:
            json_data_hashes.add(task.json_data_hash)
        json_data_records = JsonDataModel.query.filter(JsonDataModel.hash.in_(json_data_hashes)).all()  # type: ignore
        json_data_mappings = {}
        for json_data_record in json_data_records:
            json_data_mappings[json_data_record.hash] = json_data_record.data
        for task in tasks:
            tasks_dict = spiff_bpmn_process_dict["tasks"]
            if bpmn_subprocess_id_to_guid_mappings:
                bpmn_subprocess_guid = bpmn_subprocess_id_to_guid_mappings[
                    task.bpmn_process_id
                ]
                tasks_dict = spiff_bpmn_process_dict["subprocesses"][
                    bpmn_subprocess_guid
                ]["tasks"]
            tasks_dict[task.guid] = task.properties_json
            tasks_dict[task.guid]["data"] = json_data_mappings[task.json_data_hash]

    @classmethod
    def _get_full_bpmn_process_dict(
        cls, process_instance_model: ProcessInstanceModel
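The new _get_tasks_dict replaces the old per-task JsonDataModel query with a single IN query over all hashes, then joins in memory. The pattern in isolation, with hypothetical names:

    # One round trip instead of one per task (the classic N+1 fix).
    hashes = {task.json_data_hash for task in tasks}
    records = JsonDataModel.query.filter(JsonDataModel.hash.in_(hashes)).all()
    data_by_hash = {record.hash: record.data for record in records}
    for task in tasks:
        task_data = data_by_hash[task.json_data_hash]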
@@ -630,17 +667,32 @@ class ProcessInstanceProcessor:

        bpmn_process = process_instance_model.bpmn_process
        if bpmn_process is not None:
            bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process)
            spiff_bpmn_process_dict.update(bpmn_process_dict)
            single_bpmn_process_dict = cls._get_bpmn_process_dict(
                bpmn_process, get_tasks=True
            )
            spiff_bpmn_process_dict.update(single_bpmn_process_dict)

            bpmn_subprocesses = BpmnProcessModel.query.filter_by(
                parent_process_id=bpmn_process.id
            ).all()
            bpmn_subprocess_id_to_guid_mappings = {}
            for bpmn_subprocess in bpmn_subprocesses:
                bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_subprocess)
                bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = (
                    bpmn_subprocess.guid
                )
                single_bpmn_process_dict = cls._get_bpmn_process_dict(
                    bpmn_subprocess
                )
                spiff_bpmn_process_dict["subprocesses"][
                    bpmn_subprocess.guid
                ] = bpmn_process_dict
                ] = single_bpmn_process_dict

            tasks = TaskModel.query.filter(
                TaskModel.bpmn_process_id.in_(bpmn_subprocess_id_to_guid_mappings.keys())  # type: ignore
            ).all()
            cls._get_tasks_dict(
                tasks, spiff_bpmn_process_dict, bpmn_subprocess_id_to_guid_mappings
            )

        return spiff_bpmn_process_dict
@@ -794,7 +846,7 @@ class ProcessInstanceProcessor:
        if start_in_seconds is None:
            start_in_seconds = time.time()

        task_json = self.get_task_json_from_spiff_task(spiff_task)
        task_json = self.get_task_dict_from_spiff_task(spiff_task)

        return {
            "process_instance_id": self.process_instance_model.id,
@@ -1026,91 +1078,12 @@ class ProcessInstanceProcessor:
            bpmn_process_definition_parent
        )

    def _add_bpmn_process(
        self,
        bpmn_process_dict: dict,
        bpmn_process_parent: Optional[BpmnProcessModel] = None,
        bpmn_process_guid: Optional[str] = None,
    ) -> BpmnProcessModel:
        tasks = bpmn_process_dict.pop("tasks")
        bpmn_process_data = bpmn_process_dict.pop("data")

        bpmn_process = None
        if bpmn_process_parent is not None:
            bpmn_process = BpmnProcessModel.query.filter_by(
                parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid
            ).first()
        elif self.process_instance_model.bpmn_process_id is not None:
            bpmn_process = self.process_instance_model.bpmn_process

        if bpmn_process is None:
            bpmn_process = BpmnProcessModel(guid=bpmn_process_guid)

        bpmn_process.properties_json = bpmn_process_dict

        bpmn_process_data_json = json.dumps(bpmn_process_data, sort_keys=True).encode(
            "utf8"
        )
        bpmn_process_data_hash = sha256(bpmn_process_data_json).hexdigest()
        if bpmn_process.json_data_hash != bpmn_process_data_hash:
            json_data = (
                db.session.query(JsonDataModel.id)
                .filter_by(hash=bpmn_process_data_hash)
                .first()
            )
            if json_data is None:
                json_data = JsonDataModel(
                    hash=bpmn_process_data_hash, data=bpmn_process_data
                )
                db.session.add(json_data)
            bpmn_process.json_data_hash = bpmn_process_data_hash

        if bpmn_process_parent is None:
            self.process_instance_model.bpmn_process = bpmn_process
        elif bpmn_process.parent_process_id is None:
            bpmn_process.parent_process_id = bpmn_process_parent.id
        db.session.add(bpmn_process)

        for task_id, task_properties in tasks.items():
            task_data_dict = task_properties.pop("data")
            state_int = task_properties["state"]

            task = TaskModel.query.filter_by(guid=task_id).first()
            if task is None:
                # bpmn_process_identifier = task_properties['workflow_name']
                # bpmn_identifier = task_properties['task_spec']
                #
                # task_definition = TaskDefinitionModel.query.filter_by(bpmn_identifier=bpmn_identifier)
                # .join(BpmnProcessDefinitionModel).filter(BpmnProcessDefinitionModel.bpmn_identifier==bpmn_process_identifier).first()
                # if task_definition is None:
                #     subprocess_task = TaskModel.query.filter_by(guid=bpmn_process.guid)
                task = TaskModel(guid=task_id, bpmn_process_id=bpmn_process.id)
            task.state = TaskStateNames[state_int]
            task.properties_json = task_properties

            task_data_json = json.dumps(task_data_dict, sort_keys=True).encode("utf8")
            task_data_hash = sha256(task_data_json).hexdigest()
            if task.json_data_hash != task_data_hash:
                json_data = (
                    db.session.query(JsonDataModel.id)
                    .filter_by(hash=task_data_hash)
                    .first()
                )
                if json_data is None:
                    json_data = JsonDataModel(hash=task_data_hash, data=task_data_dict)
                    db.session.add(json_data)
                task.json_data_hash = task_data_hash
            db.session.add(task)

        return bpmn_process

    def _add_bpmn_json_records(self) -> None:
        """Adds serialized_bpmn_definition and process_instance_data records to the db session.

        Expects the save method to commit it.
        """
        bpmn_dict = json.loads(self.serialize())
        # with open('tmp2.json', 'w') as f: f.write(json.dumps(bpmn_dict)
        bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version")
        process_instance_data_dict = {}
        bpmn_spec_dict = {}
@@ -1124,17 +1097,28 @@ class ProcessInstanceProcessor:
        # if self.process_instance_model.bpmn_process_definition_id is None:
        self._add_bpmn_process_definitions(bpmn_spec_dict)

        # FIXME: Update tasks in the did_complete_task instead to set the final info.
        # We will need to somehow cache all tasks initially though before each task is run.
        # Maybe always do this for first run - just need to know it's the first run.
        subprocesses = process_instance_data_dict.pop("subprocesses")
        bpmn_process_parent = self._add_bpmn_process(process_instance_data_dict)
        bpmn_process_parent, new_task_models, new_json_data_dicts = (
            TaskService.add_bpmn_process(
                process_instance_data_dict, self.process_instance_model
            )
        )
        for subprocess_task_id, subprocess_properties in subprocesses.items():
            self._add_bpmn_process(
            (
                _bpmn_subprocess,
                subprocess_new_task_models,
                subprocess_new_json_data_models,
            ) = TaskService.add_bpmn_process(
                subprocess_properties,
                self.process_instance_model,
                bpmn_process_parent,
                bpmn_process_guid=subprocess_task_id,
            )
            new_task_models.update(subprocess_new_task_models)
            new_json_data_dicts.update(subprocess_new_json_data_models)
        db.session.bulk_save_objects(new_task_models.values())

        TaskService.insert_or_update_json_data_records(new_json_data_dicts)

    def save(self) -> None:
        """Saves the current state of this processor to the database."""
@@ -1266,6 +1250,8 @@ class ProcessInstanceProcessor:
            self.bpmn_process_instance.catch(event_definition)
        except Exception as e:
            print(e)

        # TODO: do_engine_steps without a lock
        self.do_engine_steps(save=True)

    def add_step(self, step: Union[dict, None] = None) -> None:
@@ -1556,55 +1542,13 @@ class ProcessInstanceProcessor:
        # current_app.logger.debug(f"the_status: {the_status} for instance {self.process_instance_model.id}")
        return the_status

    # inspiration from https://github.com/collectiveidea/delayed_job_active_record/blob/master/lib/delayed/backend/active_record.rb
    # could consider borrowing their "cleanup all my locks when the app quits" idea as well and
    # implement via https://docs.python.org/3/library/atexit.html
    # TODO: replace with implicit/more granular locking in workflow execution service
    def lock_process_instance(self, lock_prefix: str) -> None:
        current_app.config["THREAD_LOCAL_DATA"].locked_by_prefix = lock_prefix
        locked_by = f"{lock_prefix}_{current_app.config['PROCESS_UUID']}"
        current_time_in_seconds = round(time.time())
        lock_expiry_in_seconds = (
            current_time_in_seconds
            - current_app.config[
                "SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS"
            ]
        )

        query_text = text(
            "UPDATE process_instance SET locked_at_in_seconds ="
            " :current_time_in_seconds, locked_by = :locked_by where id = :id AND"
            " (locked_by IS NULL OR locked_at_in_seconds < :lock_expiry_in_seconds);"
        ).execution_options(autocommit=True)
        result = db.engine.execute(
            query_text,
            id=self.process_instance_model.id,
            current_time_in_seconds=current_time_in_seconds,
            locked_by=locked_by,
            lock_expiry_in_seconds=lock_expiry_in_seconds,
        )
        # it seems like autocommit is working above (we see the statement in debug logs) but sqlalchemy doesn't
        # seem to update properly so tell it to commit as well.
        # if we omit this line then querying the record from a unit test doesn't ever show the record as locked.
        db.session.commit()
        if result.rowcount < 1:
            raise ProcessInstanceIsAlreadyLockedError(
                f"Cannot lock process instance {self.process_instance_model.id}. "
                "It has already been locked."
            )
        ProcessInstanceQueueService.dequeue(self.process_instance_model)

    # TODO: replace with implicit/more granular locking in workflow execution service
    def unlock_process_instance(self, lock_prefix: str) -> None:
        current_app.config["THREAD_LOCAL_DATA"].locked_by_prefix = None
        locked_by = f"{lock_prefix}_{current_app.config['PROCESS_UUID']}"
        if self.process_instance_model.locked_by != locked_by:
            raise ProcessInstanceLockedBySomethingElseError(
                f"Cannot unlock process instance {self.process_instance_model.id}."
                f" It is locked by {self.process_instance_model.locked_by}"
            )

        self.process_instance_model.locked_by = None
        self.process_instance_model.locked_at_in_seconds = None
        db.session.add(self.process_instance_model)
        db.session.commit()
        ProcessInstanceQueueService.enqueue(self.process_instance_model)

    def process_bpmn_messages(self) -> None:
        """Process_bpmn_messages."""
@@ -1666,100 +1610,52 @@ class ProcessInstanceProcessor:
        current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step
        db.session.add(self.process_instance_model)

    # TODO remove after done with the performance improvements
    # to use delete the _ prefix here and add it to the real def below
    def _do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
        """__do_engine_steps."""
        import cProfile
        from pstats import SortKey
    def do_engine_steps(
        self,
        exit_at: None = None,
        save: bool = False,
        execution_strategy_name: Optional[str] = None,
    ) -> None:
        # NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and
        # set the TaskModelSavingDelegate's secondary_engine_step_delegate to None.
        def spiff_step_details_mapping_builder(
            task: SpiffTask, start: float, end: float
        ) -> dict:
            self._script_engine.environment.revise_state_with_task_data(task)
            return self.spiff_step_details_mapping(task, start, end)

        with cProfile.Profile() as pr:
            self._do_engine_steps(exit_at=exit_at, save=save)
        pr.print_stats(sort=SortKey.CUMULATIVE)
        step_delegate = StepDetailLoggingDelegate(
            self.increment_spiff_step, spiff_step_details_mapping_builder
        )
        task_model_delegate = TaskModelSavingDelegate(
            secondary_engine_step_delegate=step_delegate,
            serializer=self._serializer,
            process_instance=self.process_instance_model,
        )

    def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
        """Do_engine_steps."""
        step_details = []
        if execution_strategy_name is None:
            execution_strategy_name = current_app.config[
                "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"
            ]

        tasks_to_log = {
            "BPMN Task",
            "Script Task",
            "Service Task",
            "Default Start Event",
            "Exclusive Gateway",
            "Call Activity",
            # "End Join",
            "End Event",
            "Default Throwing Event",
            "Subprocess",
            "Transactional Subprocess",
        }

        # making a dictionary to ensure we are not shadowing variables in the other methods
        current_task_start_in_seconds = {}

        def should_log(task: SpiffTask) -> bool:
            if (
                task.task_spec.spec_type in tasks_to_log
                and not task.task_spec.name.endswith(".EndJoin")
            ):
                return True
            return False

        def will_complete_task(task: SpiffTask) -> None:
            if should_log(task):
                current_task_start_in_seconds["time"] = time.time()
                self.increment_spiff_step()

        def did_complete_task(task: SpiffTask) -> None:
            if should_log(task):
                self._script_engine.environment.revise_state_with_task_data(task)
                step_details.append(
                    self.spiff_step_details_mapping(
                        task, current_task_start_in_seconds["time"], time.time()
                    )
                )

        try:
            self.bpmn_process_instance.refresh_waiting_tasks()

            self.bpmn_process_instance.do_engine_steps(
                exit_at=exit_at,
                will_complete_task=will_complete_task,
                did_complete_task=did_complete_task,
            )

            if self.bpmn_process_instance.is_completed():
                self._script_engine.environment.finalize_result(
                    self.bpmn_process_instance
                )

            self.process_bpmn_messages()
            self.queue_waiting_receive_messages()
        except SpiffWorkflowException as swe:
            raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe

        finally:
            # self.log_spiff_step_details(step_details)
            db.session.bulk_insert_mappings(SpiffStepDetailsModel, step_details)
            spiff_logger = logging.getLogger("spiff")
            for handler in spiff_logger.handlers:
                if hasattr(handler, "bulk_insert_logs"):
                    handler.bulk_insert_logs()  # type: ignore
            db.session.commit()

        if save:
            self.save()
        execution_strategy = execution_strategy_named(
            execution_strategy_name, task_model_delegate
        )
        execution_service = WorkflowExecutionService(
            self.bpmn_process_instance,
            self.process_instance_model,
            execution_strategy,
            self._script_engine.environment.finalize_result,
            self.save,
        )
        execution_service.do_engine_steps(exit_at, save)

    # log the spiff step details so we know what is processing the process
    # instance when a human task has a timer event.
    def log_spiff_step_details(self, step_details: Any) -> None:
        tld = current_app.config["THREAD_LOCAL_DATA"]
        if hasattr(tld, "locked_by_prefix") and len(step_details) > 0:
            locked_by_prefix = tld.locked_by_prefix
            message = (
                f"ADDING SPIFF BULK STEP DETAILS: {locked_by_prefix}: {step_details}"
            )
        if ProcessInstanceLockService.has_lock(self.process_instance_model.id):
            locked_by = ProcessInstanceLockService.locked_by()
            message = f"ADDING SPIFF BULK STEP DETAILS: {locked_by}: {step_details}"
            current_app.logger.debug(message)

    def cancel_notify(self) -> None:
@@ -1774,6 +1670,7 @@ class ProcessInstanceProcessor:
            bpmn_process_instance.signal("cancel")  # generate a cancel signal.
            bpmn_process_instance.catch(CancelEventDefinition())
            # Due to this being static, can't save granular step details in this case
            # TODO: do_engine_steps without a lock
            bpmn_process_instance.do_engine_steps()
        except WorkflowTaskException as we:
            raise ApiError.from_workflow_exception("task_error", str(we), we) from we

@@ -1919,7 +1816,7 @@ class ProcessInstanceProcessor:
        )
        return user_tasks  # type: ignore

    def get_task_json_from_spiff_task(self, spiff_task: SpiffTask) -> dict[str, Any]:
    def get_task_dict_from_spiff_task(self, spiff_task: SpiffTask) -> dict[str, Any]:
        default_registry = DefaultRegistry()
        task_data = default_registry.convert(spiff_task.data)
        python_env = default_registry.convert(
@@ -1932,17 +1829,30 @@ class ProcessInstanceProcessor:
        return task_json

    def complete_task(
        self, task: SpiffTask, human_task: HumanTaskModel, user: UserModel
        self, spiff_task: SpiffTask, human_task: HumanTaskModel, user: UserModel
    ) -> None:
        """Complete_task."""
        self.bpmn_process_instance.complete_task_from_id(task.id)
        task_model = TaskModel.query.filter_by(guid=human_task.task_id).first()
        if task_model is None:
            raise TaskNotFoundError(
                "Cannot find a task with guid"
                f" {self.process_instance_model.id} and task_id is {human_task.task_id}"
            )

        task_model.start_in_seconds = time.time()
        self.bpmn_process_instance.complete_task_from_id(spiff_task.id)
        task_model.end_in_seconds = time.time()

        human_task.completed_by_user_id = user.id
        human_task.completed = True
        human_task.task_status = spiff_task.get_state_name()
        db.session.add(human_task)

        # FIXME: remove when we switch over to using tasks only
        details_model = (
            SpiffStepDetailsModel.query.filter_by(
                process_instance_id=self.process_instance_model.id,
                task_id=str(task.id),
                task_id=str(spiff_task.id),
                task_state="READY",
            )
            .order_by(SpiffStepDetailsModel.id.desc())  # type: ignore
@@ -1951,13 +1861,28 @@ class ProcessInstanceProcessor:
        if details_model is None:
            raise SpiffStepDetailIsMissingError(
                "Cannot find a ready spiff_step_detail entry for process instance"
                f" {self.process_instance_model.id} and task_id is {task.id}"
                f" {self.process_instance_model.id} and task_id is {spiff_task.id}"
            )

        details_model.task_state = task.get_state_name()
        details_model.task_state = spiff_task.get_state_name()
        details_model.end_in_seconds = time.time()
        details_model.task_json = self.get_task_json_from_spiff_task(task)
        details_model.task_json = self.get_task_dict_from_spiff_task(spiff_task)
        db.session.add(details_model)
        # #######

        json_data_dict = TaskService.update_task_model(
            task_model, spiff_task, self._serializer
        )
        if json_data_dict is not None:
            json_data = (
                db.session.query(JsonDataModel.id)
                .filter_by(hash=json_data_dict["hash"])
                .first()
            )
            if json_data is None:
                json_data = JsonDataModel(**json_data_dict)
                db.session.add(json_data)

        # this is the thing that actually commits the db transaction (on behalf of the other updates above as well)
        self.save()

@@ -0,0 +1,110 @@
import time
from typing import List

from flask import current_app

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_queue import (
    ProcessInstanceQueueModel,
)
from spiffworkflow_backend.services.process_instance_lock_service import (
    ProcessInstanceLockService,
)


class ProcessInstanceIsAlreadyLockedError(Exception):
    pass


class ProcessInstanceQueueService:
    """TODO: comment."""

    @staticmethod
    def enqueue(process_instance: ProcessInstanceModel) -> None:
        queue_item = ProcessInstanceLockService.try_unlock(process_instance.id)

        if queue_item is None:
            queue_item = ProcessInstanceQueueModel(
                process_instance_id=process_instance.id
            )

        # TODO: configurable params (priority/run_at)
        queue_item.run_at_in_seconds = round(time.time())
        queue_item.priority = 2
        queue_item.status = process_instance.status
        queue_item.locked_by = None
        queue_item.locked_at_in_seconds = None

        db.session.add(queue_item)
        db.session.commit()

    @staticmethod
    def dequeue(process_instance: ProcessInstanceModel) -> None:
        if ProcessInstanceLockService.has_lock(process_instance.id):
            return

        locked_by = ProcessInstanceLockService.locked_by()

        db.session.query(ProcessInstanceQueueModel).filter(
            ProcessInstanceQueueModel.process_instance_id == process_instance.id,
            ProcessInstanceQueueModel.locked_by.is_(None),  # type: ignore
        ).update(
            {
                "locked_by": locked_by,
            }
        )

        db.session.commit()

        queue_entry = (
            db.session.query(ProcessInstanceQueueModel)
            .filter(
                ProcessInstanceQueueModel.process_instance_id == process_instance.id,
                ProcessInstanceQueueModel.locked_by == locked_by,
            )
            .first()
        )

        if queue_entry is None:
            raise ProcessInstanceIsAlreadyLockedError(
                f"Cannot lock process instance {process_instance.id}. "
                "It has already been locked or has not been enqueued."
            )

        ProcessInstanceLockService.lock(process_instance.id, queue_entry)

    @staticmethod
    def dequeue_many(
        status_value: str = ProcessInstanceStatus.waiting.value,
    ) -> List[int]:
        locked_by = ProcessInstanceLockService.locked_by()

        # TODO: configurable params (priority/run_at/limit)
        db.session.query(ProcessInstanceQueueModel).filter(
            ProcessInstanceQueueModel.status == status_value,
            ProcessInstanceQueueModel.locked_by.is_(None),  # type: ignore
        ).update(
            {
                "locked_by": locked_by,
            }
        )

        db.session.commit()

        queue_entries = (
            db.session.query(ProcessInstanceQueueModel)
            .filter(
                ProcessInstanceQueueModel.status == status_value,
                ProcessInstanceQueueModel.locked_by == locked_by,
            )
            .all()
        )

        locked_ids = ProcessInstanceLockService.lock_many(queue_entries)

        if len(locked_ids) > 0:
            current_app.logger.info(f"{locked_by} dequeued_many: {locked_ids}")

        return locked_ids

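The dequeue methods implement a simple claim protocol: an UPDATE that only touches rows where locked_by IS NULL, followed by a SELECT for rows now owned by this worker, which is intended to keep two workers from claiming the same instance. A usage sketch, assuming a persisted ProcessInstanceModel:

    # Sketch only: enqueue on creation, then claim from a background worker.
    ProcessInstanceQueueService.enqueue(process_instance)

    # The UPDATE ... WHERE locked_by IS NULL acts as the atomic claim.
    locked_ids = ProcessInstanceQueueService.dequeue_many(
        ProcessInstanceStatus.waiting.value
    )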
@@ -29,10 +29,13 @@ from spiffworkflow_backend.services.authorization_service import AuthorizationSe
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceIsAlreadyLockedError,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService

@@ -81,9 +84,15 @@ class ProcessInstanceService:
    @staticmethod
    def do_waiting(status_value: str = ProcessInstanceStatus.waiting.value) -> None:
        """Do_waiting."""
        locked_process_instance_ids = ProcessInstanceQueueService.dequeue_many(
            status_value
        )
        if len(locked_process_instance_ids) == 0:
            return

        records = (
            db.session.query(ProcessInstanceModel)
            .filter(ProcessInstanceModel.status == status_value)
            .filter(ProcessInstanceModel.id.in_(locked_process_instance_ids))  # type: ignore
            .all()
        )
        process_instance_lock_prefix = "Background"
@@ -97,7 +106,12 @@ class ProcessInstanceService:
                processor = ProcessInstanceProcessor(process_instance)
                processor.lock_process_instance(process_instance_lock_prefix)
                locked = True
                processor.do_engine_steps(save=True)
                execution_strategy_name = current_app.config[
                    "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"
                ]
                processor.do_engine_steps(
                    save=True, execution_strategy_name=execution_strategy_name
                )
            except ProcessInstanceIsAlreadyLockedError:
                continue
            except Exception as e:

@@ -15,7 +15,16 @@ class SecretService:

    @classmethod
    def _encrypt(cls, value: str) -> str:
        encrypted_bytes: bytes = current_app.config["CIPHER"].encrypt(value)
        encrypted_bytes: bytes = b""
        if (
            current_app.config.get("SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB")
            == "cryptography"
        ):
            # cryptography needs a bytes object
            value_as_bytes = str.encode(value)
            encrypted_bytes = current_app.config["CIPHER"].encrypt(value_as_bytes)
        else:
            encrypted_bytes = current_app.config["CIPHER"].encrypt(value)
        return encrypted_bytes.decode(cls.CIPHER_ENCODING)

    @classmethod

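The new branch exists because the two supported encryption paths disagree about input types: the `cryptography` backend encrypts bytes, while the other path accepts the string directly. A minimal sketch of the bytes-in, bytes-out behavior, assuming CIPHER is a cryptography Fernet instance (the actual cipher configuration is not shown in this commit):

    from cryptography.fernet import Fernet

    cipher = Fernet(Fernet.generate_key())
    token = cipher.encrypt("my-secret-value".encode("utf8"))  # bytes in, bytes out
    print(token.decode("ascii"))  # Fernet tokens are URL-safe base64 text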
@@ -91,54 +91,57 @@ class ServiceTaskDelegate:
    def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str:
        """Calls a connector via the configured proxy."""
        call_url = f"{connector_proxy_url()}/v1/do/{name}"
        with sentry_sdk.start_span(op="call-connector", description=call_url):
            params = {
                k: ServiceTaskDelegate.check_prefixes(v["value"])
                for k, v in bpmn_params.items()
            }
            params["spiff__task_data"] = task_data
        current_app.logger.info(f"Calling connector proxy using connector: {name}")
        with sentry_sdk.start_span(op="connector_by_name", description=name):
            with sentry_sdk.start_span(op="call-connector", description=call_url):
                params = {
                    k: ServiceTaskDelegate.check_prefixes(v["value"])
                    for k, v in bpmn_params.items()
                }
                params["spiff__task_data"] = task_data

            proxied_response = requests.post(call_url, json=params)
            response_text = proxied_response.text
            json_parse_error = None
                proxied_response = requests.post(call_url, json=params)
                response_text = proxied_response.text
                json_parse_error = None

            if response_text == "":
                response_text = "{}"
            try:
                parsed_response = json.loads(response_text)
            except Exception as e:
                json_parse_error = e
                parsed_response = {}
                if response_text == "":
                    response_text = "{}"
                try:
                    parsed_response = json.loads(response_text)
                except Exception as e:
                    json_parse_error = e
                    parsed_response = {}

            if proxied_response.status_code >= 300:
                message = ServiceTaskDelegate.get_message_for_status(
                    proxied_response.status_code
                )
                error = (
                    f"Received an unexpected response from service {name} : {message}"
                )
                if "error" in parsed_response:
                    error += parsed_response["error"]
                if json_parse_error:
                    error += (
                        "A critical component (The connector proxy) is not responding"
                        " correctly."
                if proxied_response.status_code >= 300:
                    message = ServiceTaskDelegate.get_message_for_status(
                        proxied_response.status_code
                    )
                    error = (
                        f"Received an unexpected response from service {name} :"
                        f" {message}"
                    )
                    if "error" in parsed_response:
                        error += parsed_response["error"]
                    if json_parse_error:
                        error += (
                            "A critical component (The connector proxy) is not"
                            " responding correctly."
                        )
                    raise ConnectorProxyError(error)
                elif json_parse_error:
                    raise ConnectorProxyError(
                        f"There is a problem with this connector: '{name}'. "
                        "Responses for connectors must be in JSON format. "
                    )
                raise ConnectorProxyError(error)
            elif json_parse_error:
                raise ConnectorProxyError(
                    f"There is a problem with this connector: '{name}'. "
                    "Responses for connectors must be in JSON format. "
                )

            if "refreshed_token_set" not in parsed_response:
                return response_text
                if "refreshed_token_set" not in parsed_response:
                    return response_text

            secret_key = parsed_response["auth"]
            refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"])
            user_id = g.user.id if UserService.has_user() else None
            SecretService.update_secret(secret_key, refreshed_token_set, user_id)
            return json.dumps(parsed_response["api_response"])
                secret_key = parsed_response["auth"]
                refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"])
                user_id = g.user.id if UserService.has_user() else None
                SecretService.update_secret(secret_key, refreshed_token_set, user_id)
                return json.dumps(parsed_response["api_response"])


class ServiceTaskService:

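Stripped of the sentry spans, the proxy contract is small: POST the evaluated BPMN parameters plus the task data to /v1/do/&lt;connector_name&gt; and expect JSON back. A hand-rolled sketch of the same request; the proxy URL and connector name here are illustrative, not taken from this commit:

    import requests

    # Hypothetical proxy URL and connector name; the POST body shape
    # mirrors what call_connector builds above.
    call_url = "http://localhost:7004/v1/do/http/GetRequest"
    params = {"url": "https://example.com/api", "spiff__task_data": "{}"}
    proxied_response = requests.post(call_url, json=params)
    print(proxied_response.status_code, proxied_response.text)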
@@ -0,0 +1,247 @@
import json
from hashlib import sha256
from typing import Optional
from typing import Tuple
from typing import TypedDict

from flask import current_app
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow  # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskStateNames
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert

from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data import JsonDataModel  # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401


class JsonDataDict(TypedDict):
    hash: str
    data: dict


class TaskService:
    @classmethod
    def insert_or_update_json_data_records(
        cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict]
    ) -> None:
        list_of_dicts = [*json_data_hash_to_json_data_dict_mapping.values()]
        if len(list_of_dicts) > 0:
            on_duplicate_key_stmt = None
            if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
                insert_stmt = mysql_insert(JsonDataModel).values(list_of_dicts)
                on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
                    data=insert_stmt.inserted.data, status="U"
                )
            else:
                insert_stmt = postgres_insert(JsonDataModel).values(list_of_dicts)
                on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(
                    index_elements=["hash"]
                )
            db.session.execute(on_duplicate_key_stmt)

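insert_or_update_json_data_records batches JSON payloads into one dialect-specific upsert keyed on the content hash; note that it executes the statement but does not commit, so callers own the transaction. A usage sketch with an illustrative hash value:

    # Sketch only: real hash values are sha256 hexdigests of the data.
    json_data_dicts = {
        "abc123": {"hash": "abc123", "data": {"x": 1}},
    }
    TaskService.insert_or_update_json_data_records(json_data_dicts)
    db.session.commit()  # the method itself does not commit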
    @classmethod
    def _update_task_data_on_task_model(
        cls, task_model: TaskModel, task_data_dict: dict
    ) -> Optional[JsonDataDict]:
        task_data_json = json.dumps(task_data_dict, sort_keys=True)
        task_data_hash: str = sha256(task_data_json.encode("utf8")).hexdigest()
        json_data_dict: Optional[JsonDataDict] = None
        if task_model.json_data_hash != task_data_hash:
            json_data_dict = {"hash": task_data_hash, "data": task_data_dict}
            task_model.json_data_hash = task_data_hash
        return json_data_dict

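The hash comparison above is what deduplicates task data: serializing with sort_keys=True gives a canonical form, so equal dictionaries always produce the same digest regardless of key order. A self-contained sketch of the scheme:

    import json
    from hashlib import sha256

    task_data = {"b": 2, "a": 1}
    canonical = json.dumps(task_data, sort_keys=True)  # key order cannot change the digest
    digest = sha256(canonical.encode("utf8")).hexdigest()
    # Identical payloads hash identically, so each distinct blob is stored
    # once and referenced from task rows by hash.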
    @classmethod
    def update_task_model(
        cls,
        task_model: TaskModel,
        spiff_task: SpiffTask,
        serializer: BpmnWorkflowSerializer,
    ) -> Optional[JsonDataDict]:
        """Updates properties_json and data on given task_model.

        This will NOT update start_in_seconds or end_in_seconds.
        It also returns the related json_data dict so it can be inserted later.
        """
        new_properties_json = serializer.task_to_dict(spiff_task)
        spiff_task_data = new_properties_json.pop("data")
        task_model.properties_json = new_properties_json
        task_model.state = TaskStateNames[new_properties_json["state"]]
        json_data_dict = cls._update_task_data_on_task_model(
            task_model, spiff_task_data
        )
        return json_data_dict

    @classmethod
    def find_or_create_task_model_from_spiff_task(
        cls,
        spiff_task: SpiffTask,
        process_instance: ProcessInstanceModel,
        serializer: BpmnWorkflowSerializer,
    ) -> Tuple[
        Optional[BpmnProcessModel],
        TaskModel,
        dict[str, TaskModel],
        dict[str, JsonDataDict],
    ]:
        spiff_task_guid = str(spiff_task.id)
        task_model: Optional[TaskModel] = TaskModel.query.filter_by(
            guid=spiff_task_guid
        ).first()
        bpmn_process = None
        new_task_models: dict[str, TaskModel] = {}
        new_json_data_dicts: dict[str, JsonDataDict] = {}
        if task_model is None:
            bpmn_process, new_task_models, new_json_data_dicts = cls.task_bpmn_process(
                spiff_task, process_instance, serializer
            )
            task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first()
            if task_model is None:
                task_model = TaskModel(
                    guid=spiff_task_guid, bpmn_process_id=bpmn_process.id
                )
        return (bpmn_process, task_model, new_task_models, new_json_data_dicts)

    @classmethod
    def task_subprocess(
        cls, spiff_task: SpiffTask
    ) -> Tuple[Optional[str], Optional[BpmnWorkflow]]:
        top_level_workflow = spiff_task.workflow._get_outermost_workflow()
        my_wf = spiff_task.workflow  # This is the workflow the spiff_task is part of
        my_sp = None
        my_sp_id = None
        if my_wf != top_level_workflow:
            # All the subprocesses are at the top level, so you can just compare them
            for sp_id, sp in top_level_workflow.subprocesses.items():
                if sp == my_wf:
                    my_sp = sp
                    my_sp_id = sp_id
                    break
        return (str(my_sp_id), my_sp)

    @classmethod
    def task_bpmn_process(
        cls,
        spiff_task: SpiffTask,
        process_instance: ProcessInstanceModel,
        serializer: BpmnWorkflowSerializer,
    ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]:
        subprocess_guid, subprocess = cls.task_subprocess(spiff_task)
        bpmn_process: Optional[BpmnProcessModel] = None
        new_task_models: dict[str, TaskModel] = {}
        new_json_data_dicts: dict[str, JsonDataDict] = {}
        if subprocess is None:
            bpmn_process = process_instance.bpmn_process
            # This is the top level workflow, which has no guid
            # check for bpmn_process_id because mypy doesn't realize bpmn_process can be None
            if process_instance.bpmn_process_id is None:
                bpmn_process, new_task_models, new_json_data_dicts = (
                    cls.add_bpmn_process(
                        serializer.workflow_to_dict(
                            spiff_task.workflow._get_outermost_workflow()
                        ),
                        process_instance,
                    )
                )
        else:
            bpmn_process = BpmnProcessModel.query.filter_by(
                guid=subprocess_guid
            ).first()
            if bpmn_process is None:
                bpmn_process, new_task_models, new_json_data_dicts = (
                    cls.add_bpmn_process(
                        serializer.workflow_to_dict(subprocess),
                        process_instance,
                        process_instance.bpmn_process,
                        subprocess_guid,
                    )
                )
        return (bpmn_process, new_task_models, new_json_data_dicts)

    @classmethod
    def add_bpmn_process(
        cls,
        bpmn_process_dict: dict,
        process_instance: ProcessInstanceModel,
        bpmn_process_parent: Optional[BpmnProcessModel] = None,
        bpmn_process_guid: Optional[str] = None,
    ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]:
        """This creates and adds a bpmn_process to the Db session.

        It will also add tasks and related json_data entries if the bpmn_process is new.
        It returns tasks and json data records in dictionaries to be added to the session later.
        """
        tasks = bpmn_process_dict.pop("tasks")
        bpmn_process_data_dict = bpmn_process_dict.pop("data")

        new_task_models = {}
        new_json_data_dicts: dict[str, JsonDataDict] = {}

        bpmn_process = None
        if bpmn_process_parent is not None:
            bpmn_process = BpmnProcessModel.query.filter_by(
                parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid
            ).first()
        elif process_instance.bpmn_process_id is not None:
            bpmn_process = process_instance.bpmn_process

        bpmn_process_is_new = False
        if bpmn_process is None:
            bpmn_process_is_new = True
            bpmn_process = BpmnProcessModel(guid=bpmn_process_guid)

        bpmn_process.properties_json = bpmn_process_dict

        bpmn_process_data_json = json.dumps(bpmn_process_data_dict, sort_keys=True)
        bpmn_process_data_hash = sha256(
            bpmn_process_data_json.encode("utf8")
        ).hexdigest()
        if bpmn_process.json_data_hash != bpmn_process_data_hash:
            new_json_data_dicts[bpmn_process_data_hash] = {
                "hash": bpmn_process_data_hash,
                "data": bpmn_process_data_dict,
            }
            bpmn_process.json_data_hash = bpmn_process_data_hash

        if bpmn_process_parent is None:
            process_instance.bpmn_process = bpmn_process
        elif bpmn_process.parent_process_id is None:
            bpmn_process.parent_process_id = bpmn_process_parent.id

        # Since we bulk insert tasks later we need to add the bpmn_process to the session
        # to ensure we have an id.
        db.session.add(bpmn_process)

        if bpmn_process_is_new:
            for task_id, task_properties in tasks.items():
                task_data_dict = task_properties.pop("data")
                state_int = task_properties["state"]

                task_model = TaskModel.query.filter_by(guid=task_id).first()
                if task_model is None:
                    # bpmn_process_identifier = task_properties['workflow_name']
                    # bpmn_identifier = task_properties['task_spec']
                    #
                    # task_definition = TaskDefinitionModel.query.filter_by(bpmn_identifier=bpmn_identifier)
                    # .join(BpmnProcessDefinitionModel).filter(BpmnProcessDefinitionModel.bpmn_identifier==bpmn_process_identifier).first()
                    # if task_definition is None:
                    #     subprocess_task = TaskModel.query.filter_by(guid=bpmn_process.guid)
                    task_model = TaskModel(
                        guid=task_id, bpmn_process_id=bpmn_process.id
                    )
                task_model.state = TaskStateNames[state_int]
                task_model.properties_json = task_properties

                json_data_dict = TaskService._update_task_data_on_task_model(
                    task_model, task_data_dict
                )
                new_task_models[task_model.guid] = task_model
                if json_data_dict is not None:
                    new_json_data_dicts[json_data_dict["hash"]] = json_data_dict

        return (bpmn_process, new_task_models, new_json_data_dicts)

@@ -0,0 +1,405 @@
import logging
import time
from typing import Callable
from typing import List
from typing import Optional

from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskState

from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_instance_correlation import (
    MessageInstanceCorrelationRuleModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
from spiffworkflow_backend.services.assertion_service import safe_assertion
from spiffworkflow_backend.services.process_instance_lock_service import (
    ProcessInstanceLockService,
)
from spiffworkflow_backend.services.task_service import JsonDataDict
from spiffworkflow_backend.services.task_service import TaskService


class EngineStepDelegate:
    """Interface of sorts for a concrete engine step delegate."""

    def will_complete_task(self, spiff_task: SpiffTask) -> None:
        pass

    def did_complete_task(self, spiff_task: SpiffTask) -> None:
        pass

    def save(self, commit: bool = False) -> None:
        pass

    def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None:
        pass

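Because the base class is a plain no-op interface, new cross-cutting behavior plugs in by overriding only the hooks it needs. A hypothetical delegate, not part of this commit, that times each engine step:

    class TimingEngineStepDelegate(EngineStepDelegate):
        """Hypothetical example: record how long each engine step takes."""

        def __init__(self) -> None:
            self.durations: dict[str, float] = {}
            self._started_at = 0.0

        def will_complete_task(self, spiff_task: SpiffTask) -> None:
            self._started_at = time.time()

        def did_complete_task(self, spiff_task: SpiffTask) -> None:
            self.durations[str(spiff_task.id)] = time.time() - self._started_at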
SpiffStepIncrementer = Callable[[], None]
SpiffStepDetailsMappingBuilder = Callable[[SpiffTask, float, float], dict]


class TaskModelSavingDelegate(EngineStepDelegate):
    """Engine step delegate that takes care of saving a task model to the database.

    It can also be given another EngineStepDelegate.
    """

    def __init__(
        self,
        serializer: BpmnWorkflowSerializer,
        process_instance: ProcessInstanceModel,
        secondary_engine_step_delegate: Optional[EngineStepDelegate] = None,
    ) -> None:
        self.secondary_engine_step_delegate = secondary_engine_step_delegate
        self.process_instance = process_instance

        self.current_task_model: Optional[TaskModel] = None
        self.task_models: dict[str, TaskModel] = {}
        self.json_data_dicts: dict[str, JsonDataDict] = {}
        self.serializer = serializer

    def should_update_task_model(self) -> bool:
        """We need to figure out if we have previously saved task info on this process instance.

        Use the bpmn_process_id to do this.
        """
        return self.process_instance.bpmn_process_id is not None

    def will_complete_task(self, spiff_task: SpiffTask) -> None:
        if self.should_update_task_model():
            _bpmn_process, task_model, new_task_models, new_json_data_dicts = (
                TaskService.find_or_create_task_model_from_spiff_task(
                    spiff_task, self.process_instance, self.serializer
                )
            )
            self.current_task_model = task_model
            self.task_models.update(new_task_models)
            self.json_data_dicts.update(new_json_data_dicts)
            self.current_task_model.start_in_seconds = time.time()
        if self.secondary_engine_step_delegate:
            self.secondary_engine_step_delegate.will_complete_task(spiff_task)

    def did_complete_task(self, spiff_task: SpiffTask) -> None:
        if self.current_task_model and self.should_update_task_model():
            self.current_task_model.end_in_seconds = time.time()
            json_data_dict = TaskService.update_task_model(
                self.current_task_model, spiff_task, self.serializer
            )
            if json_data_dict is not None:
                self.json_data_dicts[json_data_dict["hash"]] = json_data_dict
            self.task_models[self.current_task_model.guid] = self.current_task_model
        if self.secondary_engine_step_delegate:
            self.secondary_engine_step_delegate.did_complete_task(spiff_task)

    def save(self, _commit: bool = True) -> None:
        db.session.bulk_save_objects(self.task_models.values())

        TaskService.insert_or_update_json_data_records(self.json_data_dicts)

        if self.secondary_engine_step_delegate:
            self.secondary_engine_step_delegate.save(commit=False)
        db.session.commit()

    def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None:
        if self.should_update_task_model():
            # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion.
            for waiting_spiff_task in bpmn_process_instance.get_tasks(
                TaskState.WAITING
                | TaskState.CANCELLED
                | TaskState.READY
                | TaskState.MAYBE
                | TaskState.LIKELY
            ):
                _bpmn_process, task_model, new_task_models, new_json_data_dicts = (
                    TaskService.find_or_create_task_model_from_spiff_task(
                        waiting_spiff_task, self.process_instance, self.serializer
                    )
                )
                self.task_models.update(new_task_models)
                self.json_data_dicts.update(new_json_data_dicts)
                json_data_dict = TaskService.update_task_model(
                    task_model, waiting_spiff_task, self.serializer
                )
                self.task_models[task_model.guid] = task_model
                if json_data_dict is not None:
                    self.json_data_dicts[json_data_dict["hash"]] = json_data_dict


class StepDetailLoggingDelegate(EngineStepDelegate):
    """Engine step delegate that takes care of logging spiff step details.

    This separates the concerns of step execution and step logging.
    """

    def __init__(
        self,
        increment_spiff_step: SpiffStepIncrementer,
        spiff_step_details_mapping: SpiffStepDetailsMappingBuilder,
    ):
        """__init__."""
        self.increment_spiff_step = increment_spiff_step
        self.spiff_step_details_mapping = spiff_step_details_mapping
        self.step_details: List[dict] = []
        self.current_task_start_in_seconds = 0.0
        self.tasks_to_log = {
            "BPMN Task",
            "Script Task",
            "Service Task",
            "Default Start Event",
            "Exclusive Gateway",
            "Call Activity",
            # "End Join",
            "End Event",
            "Default Throwing Event",
            "Subprocess",
            "Transactional Subprocess",
        }

    def should_log(self, spiff_task: SpiffTask) -> bool:
        return (
            spiff_task.task_spec.spec_type in self.tasks_to_log
            and not spiff_task.task_spec.name.endswith(".EndJoin")
        )

    def will_complete_task(self, spiff_task: SpiffTask) -> None:
        if self.should_log(spiff_task):
            self.current_task_start_in_seconds = time.time()
            self.increment_spiff_step()

    def did_complete_task(self, spiff_task: SpiffTask) -> None:
        if self.should_log(spiff_task):
            self.step_details.append(
                self.spiff_step_details_mapping(
                    spiff_task, self.current_task_start_in_seconds, time.time()
                )
            )

    def save(self, commit: bool = True) -> None:
        db.session.bulk_insert_mappings(SpiffStepDetailsModel, self.step_details)
        if commit:
            db.session.commit()


class ExecutionStrategy:
    """Interface of sorts for a concrete execution strategy."""

    def __init__(self, delegate: EngineStepDelegate):
        """__init__."""
        self.delegate = delegate

    def do_engine_steps(
        self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None
    ) -> None:
        pass

    def save(self) -> None:
        self.delegate.save()


class GreedyExecutionStrategy(ExecutionStrategy):
    """The common execution strategy. This will greedily run all engine steps without stopping."""

    def do_engine_steps(
        self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None
    ) -> None:
        bpmn_process_instance.do_engine_steps(
            exit_at=exit_at,
            will_complete_task=self.delegate.will_complete_task,
            did_complete_task=self.delegate.did_complete_task,
        )
        self.delegate.after_engine_steps(bpmn_process_instance)


class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy):
    """For illustration purposes, not currently integrated.

    Would allow the `run` from the UI to execute until a service task then
    return (to an interstitial page). The background processor would then take over.
    """

    def do_engine_steps(
        self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None
    ) -> None:
        engine_steps = list(
            [
                t
                for t in bpmn_process_instance.get_tasks(TaskState.READY)
                if bpmn_process_instance._is_engine_task(t.task_spec)
            ]
        )
        while engine_steps:
            for spiff_task in engine_steps:
                if spiff_task.task_spec.spec_type == "Service Task":
                    return
                self.delegate.will_complete_task(spiff_task)
                spiff_task.complete()
                self.delegate.did_complete_task(spiff_task)

            engine_steps = list(
                [
                    t
                    for t in bpmn_process_instance.get_tasks(TaskState.READY)
                    if bpmn_process_instance._is_engine_task(t.task_spec)
                ]
            )

        self.delegate.after_engine_steps(bpmn_process_instance)


def execution_strategy_named(
    name: str, delegate: EngineStepDelegate
) -> ExecutionStrategy:
    cls = {
        "greedy": GreedyExecutionStrategy,
        "run_until_service_task": RunUntilServiceTaskExecutionStrategy,
    }[name]

    return cls(delegate)

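The lookup is a plain dict index, so an unrecognized strategy name raises KeyError at configuration time rather than silently falling back. Usage, with names from this file:

    delegate = TaskModelSavingDelegate(serializer, process_instance)
    strategy = execution_strategy_named("greedy", delegate)
    # "run_until_service_task" is the only other recognized name.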
ProcessInstanceCompleter = Callable[[BpmnWorkflow], None]
ProcessInstanceSaver = Callable[[], None]


class WorkflowExecutionService:
    """Provides the driver code for workflow execution."""

    def __init__(
        self,
        bpmn_process_instance: BpmnWorkflow,
        process_instance_model: ProcessInstanceModel,
        execution_strategy: ExecutionStrategy,
        process_instance_completer: ProcessInstanceCompleter,
        process_instance_saver: ProcessInstanceSaver,
    ):
        """__init__."""
        self.bpmn_process_instance = bpmn_process_instance
        self.process_instance_model = process_instance_model
        self.execution_strategy = execution_strategy
        self.process_instance_completer = process_instance_completer
        self.process_instance_saver = process_instance_saver

    def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
        """Do_engine_steps."""
        with safe_assertion(
            ProcessInstanceLockService.has_lock(self.process_instance_model.id)
        ) as tripped:
            if tripped:
                raise AssertionError(
                    "The current thread has not obtained a lock for this process"
                    f" instance ({self.process_instance_model.id})."
                )

        try:
            self.bpmn_process_instance.refresh_waiting_tasks()

            # TODO: implicit re-entrant locks here `with_dequeued`
            self.execution_strategy.do_engine_steps(self.bpmn_process_instance, exit_at)

            if self.bpmn_process_instance.is_completed():
                self.process_instance_completer(self.bpmn_process_instance)

            self.process_bpmn_messages()
            self.queue_waiting_receive_messages()
        except SpiffWorkflowException as swe:
            raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe

        finally:
            self.execution_strategy.save()
            spiff_logger = logging.getLogger("spiff")
            for handler in spiff_logger.handlers:
                if hasattr(handler, "bulk_insert_logs"):
                    handler.bulk_insert_logs()  # type: ignore
            db.session.commit()

            if save:
                self.process_instance_saver()

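safe_assertion is imported from assertion_service, which is not part of this commit; from the call site above it reads as a context manager that yields whether the asserted condition failed, leaving the caller to decide how to react. A plausible reconstruction, offered purely as an assumption:

    from contextlib import contextmanager
    from typing import Iterator

    @contextmanager
    def safe_assertion(condition: bool) -> Iterator[bool]:
        # Hypothetical: yield True when the assertion tripped so the caller
        # can raise (or merely log) on its own terms.
        yield not condition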
    def process_bpmn_messages(self) -> None:
        """Process_bpmn_messages."""
        bpmn_messages = self.bpmn_process_instance.get_bpmn_messages()
        for bpmn_message in bpmn_messages:
            message_instance = MessageInstanceModel(
                process_instance_id=self.process_instance_model.id,
                user_id=self.process_instance_model.process_initiator_id,  # TODO: use the correct swimlane user when that is set up
                message_type="send",
                name=bpmn_message.name,
                payload=bpmn_message.payload,
                correlation_keys=self.bpmn_process_instance.correlations,
            )
            db.session.add(message_instance)

        bpmn_process = self.process_instance_model.bpmn_process
        if bpmn_process is not None:
            bpmn_process_correlations = self.bpmn_process_instance.correlations
            bpmn_process.properties_json["correlations"] = bpmn_process_correlations
            db.session.add(bpmn_process)

        db.session.commit()

    def queue_waiting_receive_messages(self) -> None:
        """Queue_waiting_receive_messages."""
        waiting_events = self.bpmn_process_instance.waiting_events()
        waiting_message_events = filter(
            lambda e: e["event_type"] == "Message", waiting_events
        )

        for event in waiting_message_events:
            # Ensure we are only creating one message instance for each waiting message
            if (
                MessageInstanceModel.query.filter_by(
                    process_instance_id=self.process_instance_model.id,
                    message_type="receive",
                    name=event["name"],
                ).count()
                > 0
            ):
                continue

            # Create a new Message Instance
            message_instance = MessageInstanceModel(
                process_instance_id=self.process_instance_model.id,
                user_id=self.process_instance_model.process_initiator_id,
                message_type="receive",
                name=event["name"],
                correlation_keys=self.bpmn_process_instance.correlations,
            )
            for correlation_property in event["value"]:
                message_correlation = MessageInstanceCorrelationRuleModel(
                    message_instance_id=message_instance.id,
                    name=correlation_property.name,
                    retrieval_expression=correlation_property.retrieval_expression,
                )
                message_instance.correlation_rules.append(message_correlation)
            db.session.add(message_instance)

        bpmn_process = self.process_instance_model.bpmn_process

        if bpmn_process is not None:
            bpmn_process_correlations = self.bpmn_process_instance.correlations
            bpmn_process.properties_json["correlations"] = bpmn_process_correlations
            db.session.add(bpmn_process)

        db.session.commit()


class ProfiledWorkflowExecutionService(WorkflowExecutionService):
    """A profiled version of the workflow execution service."""

    def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
        """__do_engine_steps."""
        import cProfile
        from pstats import SortKey

        with cProfile.Profile() as pr:
            super().do_engine_steps(exit_at=exit_at, save=save)
        pr.print_stats(sort=SortKey.CUMULATIVE)

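The profiled subclass wraps the parent call in cProfile's context-manager form (Python 3.8+) and dumps cumulative timings. The same pattern works for profiling any block in isolation:

    import cProfile
    from pstats import SortKey

    with cProfile.Profile() as pr:
        sum(i * i for i in range(1_000_000))  # stand-in for do_engine_steps
    pr.print_stats(sort=SortKey.CUMULATIVE)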
@@ -0,0 +1,77 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:collaboration id="Collaboration_SystemMessageNotification">
    <bpmn:participant id="Participant_MessageReceiver" name="Message Receiver" processRef="Process_MessageReceiverNotification" />
    <bpmn:participant id="Participant_MessageSender" name="Message Sender" />
    <bpmn:messageFlow id="Flow_1lktxcr" sourceRef="Participant_MessageSender" targetRef="StartEvent_1" />
    <bpmn:correlationKey name="error_details">
      <bpmn:correlationPropertyRef>message_text</bpmn:correlationPropertyRef>
      <bpmn:correlationPropertyRef>recipients</bpmn:correlationPropertyRef>
    </bpmn:correlationKey>
  </bpmn:collaboration>
  <bpmn:process id="Process_MessageReceiverNotification" name="Message Receiver" isExecutable="true">
    <bpmn:sequenceFlow id="Flow_1wwg6l1" sourceRef="StartEvent_1" targetRef="Activity_1twstnr" />
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_1wwg6l1</bpmn:outgoing>
      <bpmn:messageEventDefinition id="MessageEventDefinition_1kqg8ba" messageRef="Message_SystemMessageNotification" />
    </bpmn:startEvent>
    <bpmn:endEvent id="Event_1rn093f">
      <bpmn:incoming>Flow_1hpekd5</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1hpekd5" sourceRef="Activity_1twstnr" targetRef="Event_1rn093f" />
    <bpmn:scriptTask id="Activity_1twstnr" name="Simple Script Task">
      <bpmn:incoming>Flow_1wwg6l1</bpmn:incoming>
      <bpmn:outgoing>Flow_1hpekd5</bpmn:outgoing>
      <bpmn:script>x = 1</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <bpmn:message id="Message_SystemMessageNotification" name="SystemErrorMessage">
    <bpmn:extensionElements>
      <spiffworkflow:messageVariable>system_message</spiffworkflow:messageVariable>
    </bpmn:extensionElements>
  </bpmn:message>
  <bpmn:correlationProperty id="message_text" name="message_text">
    <bpmn:correlationPropertyRetrievalExpression messageRef="Message_SystemMessageNotification">
      <bpmn:formalExpression>message_text</bpmn:formalExpression>
    </bpmn:correlationPropertyRetrievalExpression>
  </bpmn:correlationProperty>
  <bpmn:correlationProperty id="recipients" name="recipients">
    <bpmn:correlationPropertyRetrievalExpression messageRef="Message_SystemMessageNotification">
      <bpmn:formalExpression>recipients</bpmn:formalExpression>
    </bpmn:correlationPropertyRetrievalExpression>
  </bpmn:correlationProperty>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_SystemMessageNotification">
      <bpmndi:BPMNShape id="Participant_0hdwpzk_di" bpmnElement="Participant_MessageReceiver" isHorizontal="true">
        <dc:Bounds x="120" y="80" width="570" height="180" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1rpzksh_di" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1rn093f_di" bpmnElement="Event_1rn093f">
        <dc:Bounds x="522" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_13sbt91_di" bpmnElement="Activity_1twstnr">
        <dc:Bounds x="400" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_1wwg6l1_di" bpmnElement="Flow_1wwg6l1">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="400" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1hpekd5_di" bpmnElement="Flow_1hpekd5">
        <di:waypoint x="500" y="177" />
        <di:waypoint x="522" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="Participant_04vc6oc_di" bpmnElement="Participant_MessageSender" isHorizontal="true">
        <dc:Bounds x="120" y="-40" width="390" height="60" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_1lktxcr_di" bpmnElement="Flow_1lktxcr">
        <di:waypoint x="197" y="20" />
        <di:waypoint x="197" y="159" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

@@ -2,39 +2,52 @@
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="Process_ManualTask" name="Manual Task" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_1xlck7g</bpmn:outgoing>
      <bpmn:outgoing>Flow_0stlaxe</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_1xlck7g" sourceRef="StartEvent_1" targetRef="Activity_Hello" />
    <bpmn:endEvent id="Event_0ia26nb">
    <bpmn:endEvent id="end_event_of_manual_task_model">
      <bpmn:incoming>Flow_0nnh2x9</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_0nnh2x9" sourceRef="Activity_Hello" targetRef="Event_0ia26nb" />
    <bpmn:sequenceFlow id="Flow_0nnh2x9" sourceRef="Activity_Hello" targetRef="end_event_of_manual_task_model" />
    <bpmn:manualTask id="Activity_Hello" name="Hello">
      <bpmn:extensionElements>
        <spiffworkflow:instructionsForEndUser>## Hello</spiffworkflow:instructionsForEndUser>
      </bpmn:extensionElements>
      <bpmn:incoming>Flow_1xlck7g</bpmn:incoming>
      <bpmn:incoming>Flow_1pmem7s</bpmn:incoming>
      <bpmn:outgoing>Flow_0nnh2x9</bpmn:outgoing>
    </bpmn:manualTask>
    <bpmn:sequenceFlow id="Flow_0stlaxe" sourceRef="StartEvent_1" targetRef="the_script" />
    <bpmn:sequenceFlow id="Flow_1pmem7s" sourceRef="the_script" targetRef="Activity_Hello" />
    <bpmn:scriptTask id="the_script">
      <bpmn:incoming>Flow_0stlaxe</bpmn:incoming>
      <bpmn:outgoing>Flow_1pmem7s</bpmn:outgoing>
      <bpmn:script>the_new_var = "HEY"</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_ManualTask">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_0ia26nb_di" bpmnElement="Event_0ia26nb">
        <dc:Bounds x="432" y="159" width="36" height="36" />
      <bpmndi:BPMNShape id="Event_0ia26nb_di" bpmnElement="end_event_of_manual_task_model">
        <dc:Bounds x="592" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1rcj16n_di" bpmnElement="Activity_Hello">
        <dc:Bounds x="420" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1vokg57_di" bpmnElement="the_script">
        <dc:Bounds x="270" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_1xlck7g_di" bpmnElement="Flow_1xlck7g">
      <bpmndi:BPMNEdge id="Flow_0nnh2x9_di" bpmnElement="Flow_0nnh2x9">
        <di:waypoint x="520" y="177" />
        <di:waypoint x="592" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0stlaxe_di" bpmnElement="Flow_0stlaxe">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0nnh2x9_di" bpmnElement="Flow_0nnh2x9">
      <bpmndi:BPMNEdge id="Flow_1pmem7s_di" bpmnElement="Flow_1pmem7s">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="432" y="177" />
        <di:waypoint x="420" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>

@@ -0,0 +1,154 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="top_level_process" name="Manual Task" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0stlaxe</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:endEvent id="end_event_of_manual_task_model">
      <bpmn:incoming>Flow_1ygcsbt</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:manualTask id="manual_task" name="Hello">
      <bpmn:extensionElements>
        <spiffworkflow:instructionsForEndUser>## Hello</spiffworkflow:instructionsForEndUser>
      </bpmn:extensionElements>
      <bpmn:incoming>Flow_1fktmf7</bpmn:incoming>
      <bpmn:incoming>Flow_1t9ywmr</bpmn:incoming>
      <bpmn:outgoing>Flow_09gjylo</bpmn:outgoing>
    </bpmn:manualTask>
    <bpmn:sequenceFlow id="Flow_0stlaxe" sourceRef="StartEvent_1" targetRef="top_level_script" />
    <bpmn:scriptTask id="top_level_script">
      <bpmn:incoming>Flow_0stlaxe</bpmn:incoming>
      <bpmn:outgoing>Flow_1fktmf7</bpmn:outgoing>
      <bpmn:script>set_in_top_level_script = 1</bpmn:script>
    </bpmn:scriptTask>
    <bpmn:sequenceFlow id="Flow_1fktmf7" sourceRef="top_level_script" targetRef="manual_task" />
    <bpmn:sequenceFlow id="Flow_1i7syph" sourceRef="top_level_subprocess" targetRef="top_level_call_activity" />
    <bpmn:sequenceFlow id="Flow_09gjylo" sourceRef="manual_task" targetRef="top_level_subprocess" />
    <bpmn:subProcess id="top_level_subprocess">
      <bpmn:incoming>Flow_09gjylo</bpmn:incoming>
      <bpmn:outgoing>Flow_1i7syph</bpmn:outgoing>
      <bpmn:startEvent id="Event_0g7txdo">
        <bpmn:outgoing>Flow_00k1tii</bpmn:outgoing>
      </bpmn:startEvent>
      <bpmn:sequenceFlow id="Flow_00k1tii" sourceRef="Event_0g7txdo" targetRef="top_level_subprocess_script" />
      <bpmn:endEvent id="Event_0zi0szr">
        <bpmn:incoming>Flow_1b4o55k</bpmn:incoming>
      </bpmn:endEvent>
      <bpmn:sequenceFlow id="Flow_1b4o55k" sourceRef="top_level_subprocess_script" targetRef="Event_0zi0szr" />
      <bpmn:scriptTask id="top_level_subprocess_script">
        <bpmn:incoming>Flow_00k1tii</bpmn:incoming>
        <bpmn:outgoing>Flow_1b4o55k</bpmn:outgoing>
        <bpmn:script>set_in_top_level_subprocess = 1

try:
    a = set_in_test_process_to_call_script
    we_move_on = True
except:
    we_move_on = False</bpmn:script>
      </bpmn:scriptTask>
    </bpmn:subProcess>
    <bpmn:callActivity id="top_level_call_activity" calledElement="test_process_to_call">
      <bpmn:incoming>Flow_1i7syph</bpmn:incoming>
      <bpmn:outgoing>Flow_187mcqe</bpmn:outgoing>
    </bpmn:callActivity>
    <bpmn:exclusiveGateway id="Gateway_0p8naw0" default="Flow_1t9ywmr">
      <bpmn:incoming>Flow_187mcqe</bpmn:incoming>
      <bpmn:outgoing>Flow_0lw7sda</bpmn:outgoing>
      <bpmn:outgoing>Flow_1t9ywmr</bpmn:outgoing>
    </bpmn:exclusiveGateway>
    <bpmn:sequenceFlow id="Flow_187mcqe" sourceRef="top_level_call_activity" targetRef="Gateway_0p8naw0" />
    <bpmn:sequenceFlow id="Flow_0lw7sda" sourceRef="Gateway_0p8naw0" targetRef="top_level_process_script_after_gate">
      <bpmn:conditionExpression>we_move_on == True</bpmn:conditionExpression>
    </bpmn:sequenceFlow>
    <bpmn:sequenceFlow id="Flow_1ygcsbt" sourceRef="top_level_process_script_after_gate" targetRef="end_event_of_manual_task_model" />
    <bpmn:scriptTask id="top_level_process_script_after_gate">
      <bpmn:incoming>Flow_0lw7sda</bpmn:incoming>
      <bpmn:outgoing>Flow_1ygcsbt</bpmn:outgoing>
      <bpmn:script>set_top_level_process_script_after_gate = 1</bpmn:script>
    </bpmn:scriptTask>
    <bpmn:sequenceFlow id="Flow_1t9ywmr" sourceRef="Gateway_0p8naw0" targetRef="manual_task" />
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top_level_process">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_0ia26nb_di" bpmnElement="end_event_of_manual_task_model">
        <dc:Bounds x="1092" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1rcj16n_di" bpmnElement="manual_task">
        <dc:Bounds x="400" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1vokg57_di" bpmnElement="top_level_script">
        <dc:Bounds x="270" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_19a46sv_di" bpmnElement="top_level_subprocess">
        <dc:Bounds x="530" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_04hrmow_di" bpmnElement="top_level_call_activity">
        <dc:Bounds x="680" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Gateway_0p8naw0_di" bpmnElement="Gateway_0p8naw0" isMarkerVisible="true">
        <dc:Bounds x="835" y="152" width="50" height="50" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1yhtryv_di" bpmnElement="top_level_process_script_after_gate">
        <dc:Bounds x="940" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_0stlaxe_di" bpmnElement="Flow_0stlaxe">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1fktmf7_di" bpmnElement="Flow_1fktmf7">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="400" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1i7syph_di" bpmnElement="Flow_1i7syph">
        <di:waypoint x="630" y="177" />
        <di:waypoint x="680" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_09gjylo_di" bpmnElement="Flow_09gjylo">
        <di:waypoint x="500" y="177" />
        <di:waypoint x="530" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_187mcqe_di" bpmnElement="Flow_187mcqe">
        <di:waypoint x="780" y="177" />
        <di:waypoint x="835" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0lw7sda_di" bpmnElement="Flow_0lw7sda">
        <di:waypoint x="885" y="177" />
        <di:waypoint x="940" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1ygcsbt_di" bpmnElement="Flow_1ygcsbt">
        <di:waypoint x="1040" y="177" />
        <di:waypoint x="1092" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1t9ywmr_di" bpmnElement="Flow_1t9ywmr">
        <di:waypoint x="860" y="152" />
        <di:waypoint x="860" y="100" />
        <di:waypoint x="450" y="100" />
        <di:waypoint x="450" y="137" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
  <bpmndi:BPMNDiagram id="BPMNDiagram_01cbxj3">
    <bpmndi:BPMNPlane id="BPMNPlane_07qyo6y" bpmnElement="top_level_subprocess">
      <bpmndi:BPMNShape id="Event_0g7txdo_di" bpmnElement="Event_0g7txdo">
        <dc:Bounds x="362" y="132" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_0zi0szr_di" bpmnElement="Event_0zi0szr">
        <dc:Bounds x="562" y="132" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0g000aa_di" bpmnElement="top_level_subprocess_script">
        <dc:Bounds x="430" y="110" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_00k1tii_di" bpmnElement="Flow_00k1tii">
        <di:waypoint x="398" y="150" />
        <di:waypoint x="430" y="150" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1b4o55k_di" bpmnElement="Flow_1b4o55k">
        <di:waypoint x="530" y="150" />
        <di:waypoint x="562" y="150" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

@@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="test_process_to_call" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_06g687y</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_06g687y" sourceRef="StartEvent_1" targetRef="test_process_to_call_script" />
    <bpmn:endEvent id="Event_1nn875f">
      <bpmn:incoming>Flow_01e21r0</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_01e21r0" sourceRef="test_process_to_call_script" targetRef="Event_1nn875f" />
    <bpmn:scriptTask id="test_process_to_call_script">
      <bpmn:incoming>Flow_06g687y</bpmn:incoming>
      <bpmn:outgoing>Flow_01e21r0</bpmn:outgoing>
      <bpmn:script>set_in_test_process_to_call_script = 1</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="test_process_to_call">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1nn875f_di" bpmnElement="Event_1nn875f">
        <dc:Bounds x="432" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_059upl6_di" bpmnElement="test_process_to_call_script">
        <dc:Bounds x="270" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_06g687y_di" bpmnElement="Flow_06g687y">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_01e21r0_di" bpmnElement="Flow_01e21r0">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="432" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

@@ -25,6 +25,9 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService
@@ -308,6 +311,9 @@ class BaseTest:
        )
        db.session.add(process_instance)
        db.session.commit()

        ProcessInstanceQueueService.enqueue(process_instance)

        return process_instance

    @classmethod
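The hunk above adds an explicit ProcessInstanceQueueService.enqueue call once the new instance has been committed, so freshly created test instances land in the processing queue. A minimal sketch of that create-persist-enqueue ordering, using a hypothetical InMemoryQueue stand-in rather than the real service:

# Hypothetical stand-in for ProcessInstanceQueueService; illustrative only.
class InMemoryQueue:
    def __init__(self) -> None:
        self.pending: list[int] = []

    def enqueue(self, process_instance_id: int) -> None:
        # Callers commit first so only persisted instances are ever queued.
        self.pending.append(process_instance_id)


queue = InMemoryQueue()
queue.enqueue(42)  # mirrors: db.session.commit(); then enqueue(process_instance)
assert queue.pending == [42]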
@@ -0,0 +1,104 @@
import pytest
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend import db
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService


class TestErrorHandlingService(BaseTest):
    """Error Handling does some crazy stuff man.

    Like it can fire off BPMN messages in case a BPMN Task is waiting for that message.
    """

    def run_process_model_and_handle_error(
        self, process_model: ProcessModelInfo, user: UserModel
    ) -> ProcessInstanceModel:
        process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model.id, user
        )
        pip = ProcessInstanceProcessor(process_instance)
        with pytest.raises(ApiError) as e:
            pip.do_engine_steps(save=True)
        ErrorHandlingService().handle_error(pip, e.value)
        return process_instance

    def test_handle_error_suspends_or_faults_process(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        """Process Model in DB marked as suspended when error occurs."""
        process_model = load_test_spec(
            "test_group/error_suspend",
            process_model_source_directory="error",
            bpmn_file_name="error.bpmn",  # Slightly misnamed, it sends and receives
        )

        # Process instance should be marked as errored by default.
        process_instance = self.run_process_model_and_handle_error(
            process_model, with_super_admin_user
        )
        assert ProcessInstanceStatus.error.value == process_instance.status

        # If the process model should be suspended on error, then that is what should happen.
        process_model.fault_or_suspend_on_exception = "suspend"
        ProcessModelService.save_process_model(process_model)
        process_instance = self.run_process_model_and_handle_error(
            process_model, with_super_admin_user
        )
        assert ProcessInstanceStatus.suspended.value == process_instance.status

    def test_error_sends_bpmn_message(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        """Real BPMN Messages should get generated and processes should fire off and complete."""
        process_model = load_test_spec(
            "test_group/error_send_message_bpmn",
            process_model_source_directory="error",
            bpmn_file_name="error.bpmn",  # Slightly misnamed, it sends and receives
        )
        # Process model that will listen for the errors sent.
        load_test_spec(
            "test_group/admin_tools/error_handler",
            process_model_source_directory="error",
            bpmn_file_name="error_handler.bpmn",  # Slightly misnamed, it sends and receives
        )
        process_model.exception_notification_addresses = [
            "dan@ILoveToReadErrorsInMyEmails.com"
        ]
        ProcessModelService.save_process_model(process_model)
        # Kick off the process and assure it got marked as an error.
        process_instance = self.run_process_model_and_handle_error(
            process_model, with_super_admin_user
        )
        assert ProcessInstanceStatus.error.value == process_instance.status

        # Both send and receive messages should be generated, matched
        # and considered complete.
        messages = db.session.query(MessageInstanceModel).all()
        assert 2 == len(messages)
        assert "completed" == messages[0].status
        assert "completed" == messages[1].status
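The two tests above pin down the observable contract of ErrorHandlingService.handle_error: a failing instance ends up in error status unless its model opts into suspension via fault_or_suspend_on_exception. A rough sketch of that decision, with simplified stand-in classes rather than the real backend models:

# Simplified stand-ins; the real models live in spiffworkflow_backend.
class FakeProcessModel:
    fault_or_suspend_on_exception = "fault"


class FakeProcessInstance:
    status = "running"


def handle_error_sketch(model: FakeProcessModel, instance: FakeProcessInstance) -> None:
    # Suspend when the model asks for it; otherwise mark the instance errored.
    if model.fault_or_suspend_on_exception == "suspend":
        instance.status = "suspended"
    else:
        instance.status = "error"


model = FakeProcessModel()
instance = FakeProcessInstance()
handle_error_sketch(model, instance)
assert instance.status == "error"
model.fault_or_suspend_on_exception = "suspend"
handle_error_sketch(model, instance)
assert instance.status == "suspended"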
@@ -1,4 +1,6 @@
"""Test_process_instance_processor."""
from uuid import UUID

import pytest
from flask import g
from flask.app import Flask

@@ -10,20 +12,18 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import (
    UserDoesNotHaveAccessToTaskError,
)
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceIsAlreadyLockedError,
)
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceLockedBySomethingElseError,
)
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceIsAlreadyLockedError,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
@@ -292,6 +292,108 @@ class TestProcessInstanceProcessor(BaseTest):
        assert spiff_task is not None
        assert spiff_task.state == TaskState.COMPLETED

    def test_properly_saves_tasks_when_running(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_properly_saves_tasks_when_running."""
        self.create_process_group(
            client, with_super_admin_user, "test_group", "test_group"
        )
        initiator_user = self.find_or_create_user("initiator_user")
        finance_user_three = self.find_or_create_user("testuser3")
        assert initiator_user.principal is not None
        assert finance_user_three.principal is not None
        AuthorizationService.import_permissions_from_yaml_file()

        finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
        assert finance_group is not None

        process_model = load_test_spec(
            process_model_id="test_group/manual_task_with_subprocesses",
            process_model_source_directory="manual_task_with_subprocesses",
        )
        process_instance = self.create_process_instance_from_process_model(
            process_model=process_model, user=initiator_user
        )
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)
        assert len(process_instance.active_human_tasks) == 1
        initial_human_task_id = process_instance.active_human_tasks[0].id

        # save again to ensure we go attempt to process the human tasks again
        processor.save()

        assert len(process_instance.active_human_tasks) == 1
        assert initial_human_task_id == process_instance.active_human_tasks[0].id

        processor = ProcessInstanceProcessor(process_instance)
        human_task_one = process_instance.active_human_tasks[0]
        spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
            human_task_one.task_name, processor.bpmn_process_instance
        )
        ProcessInstanceService.complete_form_task(
            processor, spiff_manual_task, {}, initiator_user, human_task_one
        )

        process_instance = ProcessInstanceModel.query.filter_by(
            id=process_instance.id
        ).first()
        processor = ProcessInstanceProcessor(process_instance)
        human_task_one = process_instance.active_human_tasks[0]
        spiff_manual_task = processor.bpmn_process_instance.get_task(
            UUID(human_task_one.task_id)
        )
        ProcessInstanceService.complete_form_task(
            processor, spiff_manual_task, {}, initiator_user, human_task_one
        )

        # recreate variables to ensure all bpmn json was recreated from scratch from the db
        process_instance_relookup = ProcessInstanceModel.query.filter_by(
            id=process_instance.id
        ).first()
        processor_final = ProcessInstanceProcessor(process_instance_relookup)
        assert process_instance_relookup.status == "complete"

        # first_data_set = {"set_in_top_level_script": 1}
        # second_data_set = {**first_data_set, **{"set_in_top_level_subprocess": 1}}
        # third_data_set = {
        #     **second_data_set,
        #     **{"set_in_test_process_to_call_script": 1},
        # }
        # expected_task_data = {
        #     "top_level_script": first_data_set,
        #     "manual_task": first_data_set,
        #     "top_level_subprocess_script": second_data_set,
        #     "top_level_subprocess": second_data_set,
        #     "test_process_to_call_script": third_data_set,
        #     "top_level_call_activity": third_data_set,
        #     "end_event_of_manual_task_model": third_data_set,
        # }

        all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks()
        assert len(all_spiff_tasks) > 1
        for spiff_task in all_spiff_tasks:
            assert spiff_task.state == TaskState.COMPLETED
            # FIXME: Checking task data cannot work with the feature/remove-loop-reset branch
            # of SpiffWorkflow. This is because it saves script data to the python_env and NOT
            # to task.data. We may need to either create a new column on TaskModel to put the python_env
            # data or we could just shove it back onto the task data when adding to the database.
            # Right now everything works in practice because the python_env data is on the top level workflow
            # and so is always there but is also always the most recent. If we want to replace spiff_step_details
            # with TaskModel then we'll need some way to store python_env on each task.
            # spiff_task_name = spiff_task.task_spec.name
            # if spiff_task_name in expected_task_data:
            #     spiff_task_data = expected_task_data[spiff_task_name]
            #     failure_message = (
            #         f"Found unexpected task data on {spiff_task_name}. "
            #         f"Expected: {spiff_task_data}, Found: {spiff_task.data}"
            #     )
            #     assert spiff_task.data == spiff_task_data, failure_message

    def test_does_not_recreate_human_tasks_on_multiple_saves(
        self,
        app: Flask,
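The FIXME above explains that, on the feature/remove-loop-reset branch, SpiffWorkflow saves script results to the python_env rather than to task.data. One of the two remedies it floats, shoving the python_env data back onto the task data before persisting, might look roughly like this; the helper name and its arguments are illustrative, not existing API:

def task_data_for_db(task_data: dict, python_env_data: dict) -> dict:
    # Illustrative only: merges python_env script state back onto task data
    # before persisting, per the FIXME above; not actual backend API.
    merged = dict(task_data)
    merged.update(python_env_data)  # most recent script state wins
    return merged


assert task_data_for_db({"a": 1}, {"set_in_top_level_script": 1}) == {
    "a": 1,
    "set_in_top_level_script": 1,
}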
@@ -331,7 +433,8 @@ class TestProcessInstanceProcessor(BaseTest):
        assert len(process_instance.active_human_tasks) == 1
        assert initial_human_task_id == process_instance.active_human_tasks[0].id

    def test_it_can_lock_and_unlock_a_process_instance(
    # TODO: port this test to queue_service test
    def xxx_test_it_can_lock_and_unlock_a_process_instance(
        self,
        app: Flask,
        client: FlaskClient,

@@ -360,8 +463,8 @@ class TestProcessInstanceProcessor(BaseTest):
        with pytest.raises(ProcessInstanceIsAlreadyLockedError):
            processor.lock_process_instance("TEST")

        with pytest.raises(ProcessInstanceLockedBySomethingElseError):
            processor.unlock_process_instance("TEST2")
        # with pytest.raises(ProcessInstanceLockedBySomethingElseError):
        #     processor.unlock_process_instance("TEST2")

        processor.unlock_process_instance("TEST")
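For reference while the renamed xxx_ test is parked: the locking contract it exercised is that locking an already-locked instance raises ProcessInstanceIsAlreadyLockedError, and unlocking with a different key raises ProcessInstanceLockedBySomethingElseError. A toy sketch of that contract; the LockRegistry class and its exceptions are hypothetical stand-ins, not backend API:

class AlreadyLockedError(Exception):
    # Stands in for ProcessInstanceIsAlreadyLockedError.
    pass


class LockedBySomethingElseError(Exception):
    # Stands in for ProcessInstanceLockedBySomethingElseError.
    pass


class LockRegistry:
    def __init__(self) -> None:
        self._locks: dict[int, str] = {}

    def lock(self, instance_id: int, owner: str) -> None:
        if instance_id in self._locks:
            raise AlreadyLockedError(instance_id)
        self._locks[instance_id] = owner

    def unlock(self, instance_id: int, owner: str) -> None:
        if self._locks.get(instance_id) != owner:
            raise LockedBySomethingElseError(instance_id)
        del self._locks[instance_id]


registry = LockRegistry()
registry.lock(1, "TEST")
try:
    registry.unlock(1, "TEST2")  # wrong key, as the commented-out assertion expected
except LockedBySomethingElseError:
    pass
registry.unlock(1, "TEST")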
@@ -407,4 +510,7 @@ class TestProcessInstanceProcessor(BaseTest):
        # this is just asserting the way the functionality currently works in spiff.
        # we would actually expect this to change one day if we stop reusing the same guid
        # when we re-do a task.
        assert human_task_two.task_id == human_task_one.task_id
        # assert human_task_two.task_id == human_task_one.task_id

        # EDIT: when using feature/remove-loop-reset branch of SpiffWorkflow, these should be different.
        assert human_task_two.task_id != human_task_one.task_id
@@ -17,7 +17,12 @@ else
  shift
fi

if [[ -z "${ATTEMPTS:-}" ]]; then
if [[ -n "${ATTEMPTS:-}" ]]; then
  if [[ "$command" == "open" ]]; then
    echo "ATTEMPTS is ignored when running cypress open"
    ATTEMPTS=1
  fi
else
  ATTEMPTS=1
fi
@@ -25,9 +30,15 @@ if [[ -z "${CYPRESS_SPIFFWORKFLOW_FRONTEND_AUTH_WITH_KEYCLOAK:-}" ]]; then
  export CYPRESS_SPIFFWORKFLOW_FRONTEND_AUTH_WITH_KEYCLOAK=true
fi

cypress_run_file="/var/tmp/cypress_run_$(date +%s)"
cypress_run_file="/var/tmp/cypress_run"
echo "Recording stats to ${cypress_run_file}"

if [[ ! -f "$cypress_run_file" ]]; then
  echo "success,duration,start_time,end_time,frontend_url" >"$cypress_run_file"
fi

frontend_url="${SPIFFWORKFLOW_FRONTEND_URL:-localhost}"

for attempt in $(seq 1 "$ATTEMPTS" ); do
  echo "Running attempt: ${attempt}"

@@ -37,7 +48,14 @@ for attempt in $(seq 1 "$ATTEMPTS" ); do
    success="true"
  fi
  end_time=$(date +%s)
  if is_mac; then
    formatted_start_time=$(date -r "${start_time}" +"%Y-%m-%dT%H-%M-%S")
    formatted_end_time=$(date -r "${end_time}" +"%Y-%m-%dT%H-%M-%S")
  else
    formatted_start_time=$(date "-d@${start_time}" +"%Y-%m-%dT%H-%M-%S")
    formatted_end_time=$(date "-d@${end_time}" +"%Y-%m-%dT%H-%M-%S")
  fi

  echo "${success},$(( end_time - start_time ))" >>"$cypress_run_file"
  echo "${success},$(( end_time - start_time )),${formatted_start_time},${formatted_end_time},${frontend_url}" >>"$cypress_run_file"
done
echo "Recorded stats to ${cypress_run_file}"
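The script now writes one CSV row per attempt (success flag, duration, formatted start and end times, and the frontend URL), appending to a stable /var/tmp/cypress_run file so stats accumulate across runs. The same bookkeeping, sketched in Python for clarity; the run_suite callable is a placeholder for the actual cypress invocation:

import csv
import time
from pathlib import Path
from typing import Callable


def record_attempts(
    run_suite: Callable[[], bool], attempts: int, frontend_url: str
) -> None:
    stats_file = Path("/var/tmp/cypress_run")
    if not stats_file.exists():
        stats_file.write_text("success,duration,start_time,end_time,frontend_url\n")
    fmt = "%Y-%m-%dT%H-%M-%S"
    with stats_file.open("a", newline="") as f:
        writer = csv.writer(f)
        for _ in range(attempts):
            start = time.time()
            success = run_suite()  # placeholder for the real cypress run
            end = time.time()
            writer.writerow(
                [
                    str(success).lower(),
                    int(end - start),
                    time.strftime(fmt, time.localtime(start)),
                    time.strftime(fmt, time.localtime(end)),
                    frontend_url,
                ]
            )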
@@ -32,7 +32,7 @@ if (process.env.SPIFFWORKFLOW_FRONTEND_URL) {

const cypressConfig = {
  projectId: 'crax1q',

  defaultCommandTimeout: 10000,
  videoUploadOnPasses: false,
  chromeWebSecurity: false,
  e2e: {
@@ -158,7 +158,7 @@ describe('process-instances', () => {
    // make sure we have some process instances
    cy.runPrimaryBpmnFile();
    cy.getBySel('process-instance-list-link').click();
    cy.getBySel('process-instance-show-link').first().click();
    cy.getBySel('process-instance-show-link-id').first().click();
    cy.getBySel('process-instance-log-list-link').click();
    cy.getBySel('process-instance-log-detailed').click();
    cy.contains('process_model_one');

@@ -168,7 +168,7 @@ describe('process-models', () => {
      .click();
    cy.runPrimaryBpmnFile();

    cy.getBySel('process-instance-show-link').click();
    cy.getBySel('process-instance-show-link-id').click();
    cy.getBySel('process-instance-delete').click();
    cy.contains('Are you sure');
    cy.getBySel('process-instance-delete-modal-confirmation-dialog')

@@ -74,7 +74,7 @@ describe('tasks', () => {
    cy.assertAtLeastOneItemInPaginatedResults();

    // This should get the first one which should be the one we just completed
    cy.getBySel('process-instance-show-link').first().click();
    cy.getBySel('process-instance-show-link-id').first().click();
    cy.contains('Process Instance Id: ');

    cy.get(`g[data-element-id=form3]`).click();

@@ -106,7 +106,7 @@ describe('tasks', () => {
    cy.assertAtLeastOneItemInPaginatedResults();

    // This should get the first one which should be the one we just completed
    cy.getBySel('process-instance-show-link').first().click();
    cy.getBySel('process-instance-show-link-id').first().click();
    cy.contains('Process Instance Id: ');
    cy.contains('Status: complete');
  });

@@ -120,6 +120,6 @@ describe('tasks', () => {
    kickOffModelWithForm();

    cy.navigateToHome();
    cy.basicPaginationTest('process-instance-show-link');
    cy.basicPaginationTest('process-instance-show-link-id');
  });
});
@@ -10,7 +10,7 @@ const approveWithUser = (
    .contains(/^Submit$/)
    .click();

  cy.contains('Tasks I can complete', { timeout: 30000 });
  cy.contains('Tasks I can complete', { timeout: 60000 });
  cy.get('.cds--btn').contains(/^Go$/).click();

  // approve!

@@ -19,12 +19,12 @@ const approveWithUser = (
    .contains(/^Submit$/)
    .click();
  if (expectAdditionalApprovalInfoPage) {
    cy.contains(expectAdditionalApprovalInfoPage, { timeout: 30000 });
    cy.contains(expectAdditionalApprovalInfoPage, { timeout: 60000 });
    cy.get('button')
      .contains(/^Continue$/)
      .click();
  }
  cy.location({ timeout: 30000 }).should((loc) => {
  cy.location({ timeout: 60000 }).should((loc) => {
    expect(loc.pathname).to.eq('/tasks');
  });
  cy.logout();

@@ -39,15 +39,15 @@ describe('pp1', () => {
    cy.runPrimaryBpmnFile(true);
    cy.contains('Please select the type of request to start the process.');
    // wait a second to ensure we can click the radio button
    cy.wait(1000);
    cy.wait(2000);
    cy.get('input#root-procurement').click();
    cy.wait(1000);
    cy.wait(2000);
    cy.get('button')
      .contains(/^Submit$/)
      .click();
    cy.contains(
      'Submit a new demand request for the procurement of needed items',
      { timeout: 30000 }
      { timeout: 60000 }
    );

    cy.url().then((currentUrl) => {

@@ -68,7 +68,7 @@ describe('pp1', () => {
      .contains(/^Submit$/)
      .click();

    cy.contains('Task: Enter NDR Items', { timeout: 30000 });
    cy.contains('Task: Enter NDR Items', { timeout: 60000 });
    cy.get('#root_0_sub_category').select('op_src');
    cy.get('#root_0_item').clear().type('spiffworkflow');
    cy.get('#root_0_qty').clear().type('1');

@@ -81,14 +81,14 @@ describe('pp1', () => {

    cy.contains(
      'Review and provide any supporting information or files for your request.',
      { timeout: 30000 }
      { timeout: 60000 }
    );
    cy.contains('Submit the Request').click();
    cy.get('input[value="Submit the Request"]').click();
    cy.get('button')
      .contains(/^Submit$/)
      .click();
    cy.contains('Tasks for my open instances', { timeout: 30000 });
    cy.contains('Tasks for my open instances', { timeout: 60000 });

    cy.logout();
    approveWithUser(
@@ -109,7 +109,7 @@ Cypress.Commands.add(
    if (expectAutoRedirectToHumanTask) {
      // the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress.
      cy.url().should('include', `/tasks/`);
      cy.contains('Task: ', { timeout: 10000 });
      cy.contains('Task: ', { timeout: 30000 });
    } else {
      cy.contains(/Process Instance.*[kK]icked [oO]ff/);
      cy.reload(true);
@@ -1,10 +1,5 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import {
  Link,
  useNavigate,
  useParams,
  useSearchParams,
} from 'react-router-dom';
import { useNavigate, useParams, useSearchParams } from 'react-router-dom';

// @ts-ignore
import { Filter, Close, AddAlt } from '@carbon/icons-react';

@@ -42,6 +37,7 @@ import {
  modifyProcessIdentifierForPathParam,
  refreshAtInterval,
} from '../helpers';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';

import PaginationForTable from './PaginationForTable';
import 'react-datepicker/dist/react-datepicker.css';

@@ -61,14 +57,16 @@ import {
  ReportFilter,
  User,
  ErrorForDisplay,
  PermissionsToCheck,
} from '../interfaces';
import ProcessModelSearch from './ProcessModelSearch';
import ProcessInstanceReportSearch from './ProcessInstanceReportSearch';
import ProcessInstanceListDeleteReport from './ProcessInstanceListDeleteReport';
import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport';
import { FormatProcessModelDisplayName } from './MiniComponents';
import { Notification } from './Notification';
import useAPIError from '../hooks/UseApiError';
import { usePermissionFetcher } from '../hooks/PermissionService';
import { Can } from '../contexts/Can';

const REFRESH_INTERVAL = 5;
const REFRESH_TIMEOUT = 600;
@@ -113,6 +111,13 @@ export default function ProcessInstanceListTable({
  const navigate = useNavigate();
  const { addError, removeError } = useAPIError();

  const { targetUris } = useUriListForPermissions();
  const permissionRequestData: PermissionsToCheck = {
    [targetUris.userSearch]: ['GET'],
  };
  const { ability } = usePermissionFetcher(permissionRequestData);
  const canSearchUsers: boolean = ability.can('GET', targetUris.userSearch);

  const [processInstances, setProcessInstances] = useState([]);
  const [reportMetadata, setReportMetadata] = useState<ReportMetadata | null>();
  const [pagination, setPagination] = useState<PaginationObject | null>(null);

@@ -173,6 +178,10 @@ export default function ProcessInstanceListTable({
    useState<string[]>([]);
  const [processInitiatorSelection, setProcessInitiatorSelection] =
    useState<User | null>(null);
  const [processInitiatorText, setProcessInitiatorText] = useState<
    string | null
  >(null);

  const lastRequestedInitatorSearchTerm = useRef<string>();

  const dateParametersToAlwaysFilterBy: dateParameters = useMemo(() => {

@@ -208,7 +217,7 @@ export default function ProcessInstanceListTable({
  };

  const searchForProcessInitiator = (inputText: string) => {
    if (inputText) {
    if (inputText && canSearchUsers) {
      lastRequestedInitatorSearchTerm.current = inputText;
      HttpService.makeCallToBackend({
        path: `/users/search?username_prefix=${inputText}`,

@@ -596,6 +605,8 @@ export default function ProcessInstanceListTable({

    if (processInitiatorSelection) {
      queryParamString += `&process_initiator_username=${processInitiatorSelection.username}`;
    } else if (processInitiatorText) {
      queryParamString += `&process_initiator_username=${processInitiatorText}`;
    }

    const reportColumnsBase64 = encodeBase64(JSON.stringify(reportColumns()));

@@ -691,6 +702,14 @@ export default function ProcessInstanceListTable({
    setEndFromTime('');
    setEndToDate('');
    setEndToTime('');
    setProcessInitiatorSelection(null);
    setProcessInitiatorText('');

    if (reportMetadata) {
      reportMetadata.columns = reportMetadata.columns.filter(
        (column) => !column.filterable
      );
    }
  };

  const processInstanceReportDidChange = (selection: any, mode?: string) => {
@@ -1083,24 +1102,47 @@ export default function ProcessInstanceListTable({
            />
          </Column>
          <Column md={4}>
            <ComboBox
              onInputChange={searchForProcessInitiator}
              onChange={(event: any) => {
                setProcessInitiatorSelection(event.selectedItem);
              }}
              id="process-instance-initiator-search"
              data-qa="process-instance-initiator-search"
              items={processInstanceInitiatorOptions}
              itemToString={(processInstanceInitatorOption: User) => {
                if (processInstanceInitatorOption) {
                  return processInstanceInitatorOption.username;
            <Can
              I="GET"
              a={targetUris.userSearch}
              ability={ability}
              passThrough
            >
              {(hasAccess: boolean) => {
                if (hasAccess) {
                  return (
                    <ComboBox
                      onInputChange={searchForProcessInitiator}
                      onChange={(event: any) => {
                        setProcessInitiatorSelection(event.selectedItem);
                      }}
                      id="process-instance-initiator-search"
                      data-qa="process-instance-initiator-search"
                      items={processInstanceInitiatorOptions}
                      itemToString={(processInstanceInitatorOption: User) => {
                        if (processInstanceInitatorOption) {
                          return processInstanceInitatorOption.username;
                        }
                        return null;
                      }}
                      placeholder="Start typing username"
                      titleText="Process Initiator"
                      selectedItem={processInitiatorSelection}
                    />
                  );
                }
                return null;
                return (
                  <TextInput
                    id="process-instance-initiator-search"
                    placeholder="Enter username"
                    labelText="Process Initiator"
                    onChange={(event: any) =>
                      setProcessInitiatorText(event.target.value)
                    }
                  />
                );
              }}
              placeholder="Starting typing username"
              titleText="Process Initiator"
              selectedItem={processInitiatorSelection}
            />
            </Can>
          </Column>
          <Column md={4}>{processStatusSearch()}</Column>
        </Grid>
@@ -1203,28 +1245,13 @@ export default function ProcessInstanceListTable({
  });

  const formatProcessInstanceId = (row: ProcessInstance, id: number) => {
    const modifiedProcessModelId: String =
      modifyProcessIdentifierForPathParam(row.process_model_identifier);
    return (
      <Link
        data-qa="process-instance-show-link"
        to={`${processInstanceShowPathPrefix}/${modifiedProcessModelId}/${id}`}
        title={`View process instance ${id}`}
      >
        <span data-qa="paginated-entity-id">{id}</span>
      </Link>
    );
    return <span data-qa="paginated-entity-id">{id}</span>;
  };
  const formatProcessModelIdentifier = (_row: any, identifier: any) => {
    return (
      <Link
        to={`/admin/process-models/${modifyProcessIdentifierForPathParam(
          identifier
        )}`}
      >
        {identifier}
      </Link>
    );
    return <span>{identifier}</span>;
  };
  const formatProcessModelDisplayName = (_row: any, identifier: any) => {
    return <span>{identifier}</span>;
  };

  const formatSecondsForDisplay = (_row: any, seconds: any) => {

@@ -1237,7 +1264,7 @@ export default function ProcessInstanceListTable({
  const reportColumnFormatters: Record<string, any> = {
    id: formatProcessInstanceId,
    process_model_identifier: formatProcessModelIdentifier,
    process_model_display_name: FormatProcessModelDisplayName,
    process_model_display_name: formatProcessModelDisplayName,
    start_in_seconds: formatSecondsForDisplay,
    end_in_seconds: formatSecondsForDisplay,
  };
@@ -1245,21 +1272,65 @@ export default function ProcessInstanceListTable({
    const formatter =
      reportColumnFormatters[column.accessor] ?? defaultFormatter;
    const value = row[column.accessor];
    const modifiedModelId = modifyProcessIdentifierForPathParam(
      row.process_model_identifier
    );
    const navigateToProcessInstance = () => {
      navigate(
        `${processInstanceShowPathPrefix}/${modifiedModelId}/${row.id}`
      );
    };
    const navigateToProcessModel = () => {
      navigate(`/admin/process-models/${modifiedModelId}`);
    };

    if (column.accessor === 'status') {
      return (
        <td data-qa={`process-instance-status-${value}`}>
        // eslint-disable-next-line jsx-a11y/no-noninteractive-element-interactions
        <td
          onClick={navigateToProcessInstance}
          onKeyDown={navigateToProcessInstance}
          data-qa={`process-instance-status-${value}`}
        >
          {formatter(row, value)}
        </td>
      );
    }
    return <td>{formatter(row, value)}</td>;
    if (column.accessor === 'process_model_display_name') {
      const pmStyle = { background: 'rgba(0, 0, 0, .02)' };
      return (
        // eslint-disable-next-line jsx-a11y/no-noninteractive-element-interactions
        <td
          style={pmStyle}
          onClick={navigateToProcessModel}
          onKeyDown={navigateToProcessModel}
        >
          {formatter(row, value)}
        </td>
      );
    }
    return (
      // eslint-disable-next-line jsx-a11y/no-noninteractive-element-interactions
      <td
        data-qa={`process-instance-show-link-${column.accessor}`}
        onKeyDown={navigateToProcessModel}
        onClick={navigateToProcessInstance}
      >
        {formatter(row, value)}
      </td>
    );
  };

  const rows = processInstances.map((row: any) => {
    const currentRow = reportColumns().map((column: any) => {
      return formattedColumn(row, column);
    });
    return <tr key={row.id}>{currentRow}</tr>;
    const rowStyle = { cursor: 'pointer' };
    return (
      <tr style={rowStyle} key={row.id}>
        {currentRow}
      </tr>
    );
  });

  return (
@@ -29,6 +29,7 @@ export const useUriListForPermissions = () => {
    processModelPublishPath: `/v1.0/process-models/${params.process_model_id}/publish`,
    processModelShowPath: `/v1.0/process-models/${params.process_model_id}`,
    secretListPath: `/v1.0/secrets`,
    userSearch: `/v1.0/users/search`,
  };
}, [params]);
@@ -23,7 +23,7 @@ import ProcessBreadcrumb from '../components/ProcessBreadcrumb';

export default function TaskShow() {
  const [task, setTask] = useState<ProcessInstanceTask | null>(null);
  const [userTasks, setUserTasks] = useState(null);
  const [userTasks] = useState(null);
  const params = useParams();
  const navigate = useNavigate();
  const [disabled, setDisabled] = useState(false);

@@ -33,6 +33,8 @@ export default function TaskShow() {
  useEffect(() => {
    const processResult = (result: ProcessInstanceTask) => {
      setTask(result);
      setDisabled(false);
      /* Disable call to load previous tasks -- do not display menu.
      const url = `/v1.0/process-instances/for-me/${modifyProcessIdentifierForPathParam(
        result.process_model_identifier
      )}/${params.process_instance_id}/task-info`;

@@ -52,6 +54,7 @@ export default function TaskShow() {
        addError(error);
      },
    });
    */
    };
    HttpService.makeCallToBackend({
      path: `/tasks/${params.process_instance_id}/${params.task_id}`,