Squashed 'spiffworkflow-backend/' changes from 094de3563..031713a61

031713a61 added new test users for status in keycloak w/ burnettk
da25a85b7 added script to add test keycloak users and moved all keycloak stuff to keycloak directory w/ burnettk
74c46c9b4 pin SpiffWorkflow to passing version for CI tests w/ burnettk
3bb4c893f Merge pull request #105 from sartography/feature/avoid_logs_when_call_activity_waiting
b8d664381 disabled flake8 forcing comments to avoid all of the useless comments but still enforcing the formatting of comments if they exist w/ burnettk
9be85cac6 show start events in logs as well and added bpmn process identifiers to log table w/ burnettk
ab5b1f77f downgrade spiff and upgrade some other stuff
b51e1ca59 upgrade certifi to fix security vulnerability
65b9005d4 do not allow overwriting process models and process groups w/ burnettk
70b9ce61c added End Event to simple log view w/ burnettk
1c2fe64f1 pyl w/ burnettk
56de0294a skip failing xml test on windows w/ burnettk
31944bd07 use the correct windows path separator in test file
1e070f87c favor user_input_required over waiting status for process instances w/ burnettk
9ced1b90a fixed broken test w/ burnettk
8ae6929d1 only show milestones for simple log view w/ burnettk
b98908fa3 do not resolve entities when parsing xml w/ burnettk
ecf8acaf5 do not write to logs when a task is inheriting data from the parent w/ burnettk
e81d7b8f5 updated open id url for compose ci
acf115c5f pyl
8a0982ea9 fixed cypress tests
399a6ac69 postgres does not have group_concat, and it is stricter about getting back columns when grouping
c813209c4 Merge pull request #102 from sartography/feature/waku-fault-message
7e5d4d8ce Merge pull request #101 from sartography/task_data_len
4b0ffb17c Merge branch 'main' of github.com:sartography/spiff-arena
ef55abb7a fixed broken test w/ burnettk
9608f65ef Skip refresh steps (#103)
20a48e6ef added tasks table to process instance show page w/ burnettk
f071832e0 Merge branch 'main' into feature/waku-fault-message
57249528d run_pyl changes
30132ed09 Fixed failing test
a0f9d7e39 stop at call activity as well when getting calling subprocesses by child id w/ burnettk
e4ebe4544 Getting ./bin/pyl to pass
cf0a78aef Merge branch 'main' into task_data_len
48083b164 Bump the limit to 1mb
202ba2e8c Better impl
5bf49c386 reorder imports
c53328523 import order
c67872354 unused imports
d9a9eeae8 Merge branch 'main' into feature/waku-fault-message
e98a4540c POC checking cumulative task data len
fe5258780 fixed failing test w/ burnettk
d00d28d95 added detailed area to process instance show page w/ burnettk
910311832 set the domain for the token cookies w/ burnettk
3cae7055a a little more cleanup w/ burnettk
f756453b3 remove several debug print statements
3180353bb logout works now and queryparams are getting passed correctly on login now
0ee732842 pyl
a55b14f53 Merge branch 'main' into feature/waku-fault-message
e7ab8f8b8 Cleaned up the message text
fa299f412 Precommit
999b19062 mypy
6260af4b6 use the cookie from the frontend w/ burnettk
0e50e71db this can run on localhost with cookies w/ burnettk
aa626d743 this somewhat works and sets cookies w/ burnettk
4b5d2d611 debugging cookies w/ burnettk
4f04ed716 updated get_token to actually work
b31f04a65 Removed test for email error handler. Added stub for testing system handler process
77e4e017a lint w/ burnettk
38951f4d6 only load file references when needed to avoid unnecessary xml errors w/ burnettk
018bd8d1c save a process model file after running all validations w/ burnettk
9c45eedf1 Merge remote-tracking branch 'origin/main' into feature/add_some_xml_validations
5804f058e fixed failing tests
8eb4f1ac9 some updates to validate xml when uploading and saving w/ burnettk
63aad8839 pyl
4120deddb Merge branch 'main' into feature/waku-fault-message
1181d4191 Work on System Notification handler
d5fe920af handle subprocesses in navigation
b1b694982 call proceses through setProcesses to ensure we have up to date value and removed debug logs w/ burnettk
35fb0e130 attempting to use correct ids vs paths for windows w/ burnettk
fc580cbba more debug logs w/ burnettk
874d99580 Merge pull request #96 from sartography/feature/fix_docker_script_in_ci
ab81e753c added in debug logging for launching call activity editor w/ burnettk
ff73d5b0f fixed typeguard tests w/ burnettk
4792c66a3 fixed typeguard tests w/ burnettk
b8dda8779 pyl w/ burnettk
c16b59044 fix setting the bpmn dir in start scripts w/ burnettk
b70ef1796 Update process_models_controller to include `fault_or_suspend_on_exception` and `exception_notification_addresses` for Process Model create/update
39d374341 also show skipped tasks in the simplified log w/ burnettk
94c6f4ccd some updates to test w/ burnettk
2ebb3a14c do not allow sending messages to terminated and suspended process instances w/ burnettk
4cad37bf3 pyl w/ burnettk
2f1a11cd5 actually filter by process initiator w/ burnettk
9565d8548 Merge branch 'main' of github.com:sartography/spiff-arena
f1e399c87 highlight tasks even if they are in subprocesses of called activities w/ burnettk
00049fcc5 ensure we are not accidentally using main
9e4b37e7a updated SpiffWorkflow w/ burnettk

git-subtree-dir: spiffworkflow-backend
git-subtree-split: 031713a61add3cadf7a608732134dd4f15d34668
jasquat 2023-01-19 13:44:53 -05:00
parent 38f5b5b37c
commit 1cedc2ea50
65 changed files with 1725 additions and 899 deletions


@@ -8,8 +8,11 @@ rst-roles = class,const,func,meth,mod,ref
rst-directives = deprecated
per-file-ignores =
# prefer naming tests descriptively rather than forcing comments
tests/*:S101,D103
# asserts are ok in tests
tests/*:S101
# prefer naming functions descriptively rather than forcing comments
*:D103
bin/keycloak_test_server.py:B950,D
conftest.py:S105


@@ -7,10 +7,8 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models"
fi
BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
export BPMN_SPEC_ABSOLUTE_DIR
if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run

bin/find_sample_process_models Executable file

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
>&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
exit 1
fi
fi
pushd "$BPMN_SPEC_ABSOLUTE_DIR" >/dev/null 2>&1
if [[ "$(git rev-parse --abbrev-ref HEAD)" == "main" ]]; then
>&2 echo "ERROR: please do not use the main branch of sample-process-models. use dev"
exit 1
fi
popd >/dev/null 2>&1
fi
realpath "$BPMN_SPEC_ABSOLUTE_DIR"
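
If the check above trips because the sibling sample-process-models clone is on main, switching it to dev satisfies the guard (a sketch, assuming the default sibling layout the script probes for):

pushd ../sample-process-models >/dev/null
git checkout dev  # the guard above rejects the main branch
popd >/dev/null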


@@ -11,8 +11,7 @@ set -o errtrace -o errexit -o nounset -o pipefail
# so we can see what resources that user has access to
# originally from https://medium.com/keycloak/keycloak-jwt-token-using-curl-post-72c9e791ba8c
# btw, meta config endpoint: http://localhost:7002/realms/spiffworkflow/.well-known/openid-configuration
# token exchange described at https://github.com/keycloak/keycloak-documentation/blob/main/securing_apps/topics/token-exchange/token-exchange.adoc
# btw, meta config endpoint: http://localhost:7002/realms/spiffworkflow/.well-known/openid-configuration token exchange described at https://github.com/keycloak/keycloak-documentation/blob/main/securing_apps/topics/token-exchange/token-exchange.adoc
# some UMA stuff at https://github.com/keycloak/keycloak-documentation/blob/main/authorization_services/topics/service-authorization-obtaining-permission.adoc,
# though resource_set docs are elsewhere.
@@ -21,11 +20,13 @@ set -o errtrace -o errexit -o nounset -o pipefail
# ./bin/get_token repeat_form_user_1 repeat_form_user_1 # actually has permissions to the resource in this script
# ./bin/get_token ciadmin1 ciadmin1 '%2Fprocess-models'
HOSTNAME=localhost:7002
# KEYCLOAK_BASE_URL=http://localhost:7002
KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org
BACKEND_BASE_URL=http://localhost:7000
# BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
REALM_NAME=spiffworkflow
USERNAME=${1-ciuser1}
PASSWORD=${2-ciuser1}
URI_TO_TEST_AGAINST=${3-'%2Fprocess-models%2Fcategory_number_one%2Fprocess-model-with-repeating-form'}
USERNAME=${1-fin}
PASSWORD=${2-fin}
FRONTEND_CLIENT_ID=spiffworkflow-frontend
BACKEND_CLIENT_ID=spiffworkflow-backend
@@ -33,7 +34,7 @@ BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" # noqa: S105
SECURE=false
BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64)
KEYCLOAK_URL=http://$HOSTNAME/realms/$REALM_NAME/protocol/openid-connect/token
KEYCLOAK_URL=$KEYCLOAK_BASE_URL/realms/$REALM_NAME/protocol/openid-connect/token
echo "Using Keycloak: $KEYCLOAK_URL"
echo "realm: $REALM_NAME"
@@ -49,55 +50,72 @@ else
INSECURE=--insecure
fi
### Basic auth test with backend
result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
-H "Content-Type: application/x-www-form-urlencoded" \
-H "Authorization: Basic $BACKEND_BASIC_AUTH" \
-d "username=$USERNAME" \
-d "password=$PASSWORD" \
-d 'grant_type=password' \
-d "client_id=$FRONTEND_CLIENT_ID" \
)
frontend_token=$(jq -r '.access_token' <<< "$result")
result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
-H "Content-Type: application/x-www-form-urlencoded" \
--data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:token-exchange' \
-d "client_id=$BACKEND_CLIENT_ID" \
-d "subject_token=${frontend_token}" \
-H "Authorization: Basic $BACKEND_BASIC_AUTH" \
-d "audience=${BACKEND_CLIENT_ID}" \
)
backend_token=$(jq -r '.access_token' <<< "$result")
curl --fail -v "${BACKEND_BASE_URL}/v1.0/process-groups?per_page=1" -H "Authorization: Bearer $backend_token"
if [[ "$backend_token" != 'null' ]]; then
echo "backend_token: $backend_token"
echo "Getting resource set"
# everything_resource_id='446bdcf4-a3bd-41c7-a0f8-67a225ba6b57'
resource_result=$(curl -s "http://${HOSTNAME}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=${URI_TO_TEST_AGAINST}" -H "Authorization: Bearer $backend_token")
# resource_result=$(curl -s "http://${HOSTNAME}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=false&deep=true&max=-1&exactName=false&type=admin" -H "Authorization: Bearer $backend_token")
### Get with frontend and exchange with backend - not configured to work in keycloak atm
# result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
# -H "Content-Type: application/x-www-form-urlencoded" \
# -d "username=$USERNAME" \
# -d "password=$PASSWORD" \
# -d 'grant_type=password' \
# -d "client_id=$FRONTEND_CLIENT_ID" \
# )
# frontend_token=$(jq -r '.access_token' <<< "$result")
#
# result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
# -H "Content-Type: application/x-www-form-urlencoded" \
# --data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:token-exchange' \
# -d "client_id=$BACKEND_CLIENT_ID" \
# -d "subject_token=${frontend_token}" \
# -H "Authorization: Basic $BACKEND_BASIC_AUTH" \
# -d "audience=${BACKEND_CLIENT_ID}" \
# )
# backend_token=$(jq -r '.access_token' <<< "$result")
resource_id_name_pairs=$(jq -r '.[] | "\(._id):\(.name)"' <<<"$resource_result" || echo '')
if [[ -z "$resource_id_name_pairs" || "$resource_id_name_pairs" == "null" ]]; then
>&2 echo "ERROR: Could not find the resource id from the result: ${resource_result}"
exit 1
fi
echo $resource_id_name_pairs
echo "Getting permissions"
for resource_id_name_pair in $resource_id_name_pairs ; do
resource_id=$(awk -F ':' '{print $1}' <<<"$resource_id_name_pair")
resource_name=$(awk -F ':' '{print $2}' <<<"$resource_id_name_pair")
echo "Checking $resource_name"
curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
-H "Content-Type: application/x-www-form-urlencoded" \
-H "Authorization: Basic $BACKEND_BASIC_AUTH" \
-d "audience=${BACKEND_CLIENT_ID}" \
--data-urlencode "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket" \
-d "permission=${resource_id}" \
-d "subject_token=${backend_token}" \
| jq .
done
else
echo "Failed auth result: $result"
fi
### Check fine grain permissions - does not work currently
# URI_TO_TEST_AGAINST=${3-'%2Fprocess-models%2Fcategory_number_one%2Fprocess-model-with-repeating-form'}
# if [[ "$backend_token" != 'null' ]]; then
# echo "backend_token: $backend_token"
#
# echo "Getting resource set"
# # everything_resource_id='446bdcf4-a3bd-41c7-a0f8-67a225ba6b57'
# resource_result=$(curl -s "${BASE_URL}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=${URI_TO_TEST_AGAINST}" -H "Authorization: Bearer $backend_token")
# # resource_result=$(curl -s "${BASE_URL}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=false&deep=true&max=-1&exactName=false&type=admin" -H "Authorization: Bearer $backend_token")
#
# resource_id_name_pairs=$(jq -r '.[] | "\(._id):\(.name)"' <<<"$resource_result" || echo '')
# if [[ -z "$resource_id_name_pairs" || "$resource_id_name_pairs" == "null" ]]; then
# >&2 echo "ERROR: Could not find the resource id from the result: ${resource_result}"
# exit 1
# fi
# echo $resource_id_name_pairs
#
# echo "Getting permissions"
# for resource_id_name_pair in $resource_id_name_pairs ; do
# resource_id=$(awk -F ':' '{print $1}' <<<"$resource_id_name_pair")
# resource_name=$(awk -F ':' '{print $2}' <<<"$resource_id_name_pair")
#
# echo "Checking $resource_name"
# curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
# -H "Content-Type: application/x-www-form-urlencoded" \
# -H "Authorization: Basic $BACKEND_BASIC_AUTH" \
# -d "audience=${BACKEND_CLIENT_ID}" \
# --data-urlencode "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket" \
# -d "permission=${resource_id}" \
# -d "subject_token=${backend_token}" \
# | jq .
# done
# else
# echo "Failed auth result: $result"
# fi
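
Condensed, the part of the script that currently works is a two-step flow: a password grant as the frontend client, then a token exchange to the backend client (a sketch reusing the variables defined above):

frontend_token=$(curl -s -X POST "$KEYCLOAK_URL" \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -H "Authorization: Basic $BACKEND_BASIC_AUTH" \
  -d "username=$USERNAME" -d "password=$PASSWORD" \
  -d 'grant_type=password' -d "client_id=$FRONTEND_CLIENT_ID" | jq -r '.access_token')
backend_token=$(curl -s -X POST "$KEYCLOAK_URL" \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -H "Authorization: Basic $BACKEND_BASIC_AUTH" \
  --data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:token-exchange' \
  -d "client_id=$BACKEND_CLIENT_ID" -d "subject_token=${frontend_token}" \
  -d "audience=${BACKEND_CLIENT_ID}" | jq -r '.access_token')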


@@ -1,12 +0,0 @@
{
"web": {
"issuer": "http://localhost:8080/realms/finance",
"auth_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/auth",
"client_id": "myclient",
"client_secret": "OAh6rkjXIiPJDtPOz4459i3VtdlxGcce",
"redirect_uris": ["http://localhost:5005/*"],
"userinfo_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/userinfo",
"token_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/token",
"token_introspection_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/token/introspect"
}
}


@@ -1,105 +0,0 @@
# type: ignore
"""keycloak_test_server."""
# ./bin/start_keycloak # starts keycloak on 8080
# pip install flask_oidc
# pip install itsdangerous==2.0.1
# python ./bin/keycloak_test_server.py # starts flask on 5005
import json
import logging
import requests
from flask import Flask
from flask import g
from flask_oidc import OpenIDConnect
logging.basicConfig(level=logging.DEBUG)
app = Flask(__name__)
app.config.update(
{
"SECRET_KEY": "SomethingNotEntirelySecret",
"TESTING": True,
"DEBUG": True,
"OIDC_CLIENT_SECRETS": "bin/keycloak_test_secrets.json",
"OIDC_ID_TOKEN_COOKIE_SECURE": False,
"OIDC_REQUIRE_VERIFIED_EMAIL": False,
"OIDC_USER_INFO_ENABLED": True,
"OIDC_OPENID_REALM": "flask-demo",
"OIDC_SCOPES": ["openid", "email", "profile"],
"OIDC_INTROSPECTION_AUTH_METHOD": "client_secret_post",
}
)
oidc = OpenIDConnect(app)
@app.route("/")
def hello_world():
"""Hello_world."""
if oidc.user_loggedin:
return (
'Hello, %s, <a href="/private">See private</a> '
'<a href="/logout">Log out</a>'
% oidc.user_getfield("preferred_username")
)
else:
return 'Welcome anonymous, <a href="/private">Log in</a>'
@app.route("/private")
@oidc.require_login
def hello_me():
"""Example for protected endpoint that extracts private information from the OpenID Connect id_token.
Uses the accompanied access_token to access a backend service.
"""
info = oidc.user_getinfo(["preferred_username", "email", "sub"])
username = info.get("preferred_username")
email = info.get("email")
user_id = info.get("sub")
if user_id in oidc.credentials_store:
try:
from oauth2client.client import OAuth2Credentials
access_token = OAuth2Credentials.from_json(
oidc.credentials_store[user_id]
).access_token
print("access_token=<%s>" % access_token)
headers = {"Authorization": "Bearer %s" % (access_token)}
# YOLO
greeting = requests.get(
"http://localhost:8080/greeting", headers=headers
).text
except BaseException:
print("Could not access greeting-service")
greeting = "Hello %s" % username
return """{} your email is {} and your user_id is {}!
<ul>
<li><a href="/">Home</a></li>
<li><a href="//localhost:8080/auth/realms/finance/account?referrer=flask-app&referrer_uri=http://localhost:5005/private&">Account</a></li>
</ul>""".format(
greeting,
email,
user_id,
)
@app.route("/api", methods=["POST"])
@oidc.accept_token(require_token=True, scopes_required=["openid"])
def hello_api():
"""OAuth 2.0 protected API endpoint accessible via AccessToken."""
return json.dumps({"hello": "Welcome %s" % g.oidc_token_info["sub"]})
@app.route("/logout")
def logout():
"""Performs local logout by removing the session cookie."""
oidc.logout()
return 'Hi, you have been logged out! <a href="/">Return</a>'
if __name__ == "__main__":
app.run(port=5005)


@@ -17,19 +17,8 @@ if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_ENV=development
fi
if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
>&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
exit 1
fi
fi
export BPMN_SPEC_ABSOLUTE_DIR
fi
BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
export BPMN_SPEC_ABSOLUTE_DIR
export FLASK_SESSION_SECRET_KEY=super_secret_key
export APPLICATION_ROOT="/"
@@ -40,7 +29,13 @@ else
export FLASK_DEBUG=1
if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
RUN_BACKGROUND_SCHEDULER=false SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
fi
FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
if [[ -z "${RUN_BACKGROUND_SCHEDULER:-}" ]]; then
RUN_BACKGROUND_SCHEDULER=true
fi
# this line blocks
RUN_BACKGROUND_SCHEDULER="${RUN_BACKGROUND_SCHEDULER}" FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
fi


@@ -1,10 +0,0 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
curl -v -F key1=value1 -F upload=@localfilename URL


@@ -1,26 +0,0 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ "${1:-}" == "c" ]]; then
curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{}'
elif grep -qE '^[0-9]$' <<<"${1:-}" ; then
curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d "{ \"task_identifier\": \"${1}\"}"
else
./bin/recreate_db clean
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Product Name": "G", "Quantity": "2"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Sleeve Type": "Short"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Continue shopping?": "N"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Method": "Overnight"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Address": "Somewhere"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Place Order": "Y"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Card Number": "MY_CARD"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "2", "answer": {"Was the customer charged?": "Y"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the product available?": "Y"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the order shipped?": "Y"}}' | jq .
fi


@@ -54,7 +54,7 @@ services:
- SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development}
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
- OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002}
- OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002/realms/spiffworkflow}
- SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001}
- SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000}
- SPIFFWORKFLOW_BACKEND_PORT=7000
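
Since OPEN_ID_SERVER_URL now carries the realm path, a quick sanity check is to fetch the realm's discovery document that bin/get_token also references (a sketch against the local compose stack):

curl -s http://localhost:7002/realms/spiffworkflow/.well-known/openid-configuration | jq -r .issuer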


@@ -0,0 +1,50 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
user_file_with_one_email_per_line="${1:-}"
if [[ -z "${1:-}" ]]; then
>&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]"
exit 1
fi
KEYCLOAK_BASE_URL=http://localhost:7002
REALM_NAME=master
ADMIN_USERNAME="admin"
ADMIN_PASSWORD="admin"
SECURE=false
KEYCLOAK_URL=$KEYCLOAK_BASE_URL/realms/$REALM_NAME/protocol/openid-connect/token
if [[ $SECURE = 'y' ]]; then
INSECURE=
else
INSECURE=--insecure
fi
# https://www.appsdeveloperblog.com/keycloak-rest-api-create-a-new-user/
result=$(curl --fail -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
--header 'Content-Type: application/x-www-form-urlencoded' \
--data-urlencode "username=${ADMIN_USERNAME}" \
--data-urlencode "password=${ADMIN_PASSWORD}" \
--data-urlencode 'grant_type=password' \
--data-urlencode 'client_id=admin-cli'
)
backend_token=$(jq -r '.access_token' <<< "$result")
while read -r user_email; do
if [[ -n "$user_email" ]]; then
username=$(awk -F '@' '{print $1}' <<<"$user_email")
credentials='{"type":"password","value":"'"${username}"'","temporary":false}'
curl --fail --location --request POST 'http://localhost:7002/admin/realms/spiffworkflow/users' \
-H 'Content-Type: application/json' \
-H "Authorization: Bearer $backend_token" \
--data-raw '{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']}'
fi
done <"$user_file_with_one_email_per_line"
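
A usage sketch for this script (its filename is not shown in this diff, so ./bin/add_test_users below is a hypothetical path), fed one email per line as in the status list added further down:

printf 'finance.lead@status.im\nlegal.sme@status.im\n' > /tmp/test_users
./bin/add_test_users /tmp/test_users  # hypothetical path for this new script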


@@ -7,6 +7,8 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
realms="$*"
if [[ -z "$realms" ]]; then
realms="spiffworkflow-realm"
@@ -19,7 +21,7 @@ docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_pa
docker cp "keycloak:${docker_container_path}" "$local_tmp_dir"
for realm in $realms ; do
cp "${local_tmp_dir}/hey/${realm}.json" bin/
cp "${local_tmp_dir}/hey/${realm}.json" "${script_dir}/../realm_exports/"
done
rm -rf "$local_tmp_dir"


@@ -39,12 +39,13 @@ docker run \
-e KEYCLOAK_LOGLEVEL=ALL \
-e ROOT_LOGLEVEL=ALL \
-e KEYCLOAK_ADMIN=admin \
-e KEYCLOAK_ADMIN_PASSWORD=admin quay.io/keycloak/keycloak:20.0.1 start-dev \
-e KEYCLOAK_ADMIN_PASSWORD=admin \
quay.io/keycloak/keycloak:20.0.1 start-dev \
-Dkeycloak.profile.feature.token_exchange=enabled \
-Dkeycloak.profile.feature.admin_fine_grained_authz=enabled
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
cp "${script_dir}/spiffworkflow-realm.json" /tmp/spiffworkflow-realm.json
cp "${script_dir}/../realm_exports/spiffworkflow-realm.json" /tmp/spiffworkflow-realm.json
spiff_subdomain="unused-for-local-dev"
perl -pi -e "s/{{SPIFF_SUBDOMAIN}}/${spiff_subdomain}/g" /tmp/spiffworkflow-realm.json
docker cp /tmp/spiffworkflow-realm.json keycloak:/tmp


@@ -634,6 +634,46 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "29ba295e-9a70-41f1-bf0d-f02b468397c5",
"createdTimestamp" : 1674148694595,
"username" : "finance.lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "finance.lead@status.im",
"credentials" : [ {
"id" : "8f746fde-0a10-41b4-a973-0b967de73839",
"type" : "password",
"createdDate" : 1674148694661,
"secretData" : "{\"value\":\"vhe8ONTdkYaXLcSr73/4Ey//7U7rxh/0hiGc9S0wp8FV3EUsf+3bQSreDQCTp3DePJInpVCV34d4T0Ij+6Po0A==\",\"salt\":\"s6hEEdUPlULWfqGpxlG+TQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "f6d2488a-446c-493b-bbe8-210ede6f3e42",
"createdTimestamp" : 1674148694899,
"username" : "finance.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "finance.sme@status.im",
"credentials" : [ {
"id" : "faee8eaa-0bf4-4050-8d17-8b6b52f0b7ee",
"type" : "password",
"createdDate" : 1674148694945,
"secretData" : "{\"value\":\"tk78HqSoRT0PAJ45zt2/q6gXRYxvDDIYtLzsVdYM3sHk+tRkgYeXoyKDSyRwHm9AjbM8jFI5yUXPsWck8vemOg==\",\"salt\":\"aR9qgYMx1VUfOrppTDzMmQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "9b46f3be-a81d-4b76-92e6-2ac8462f5ec8",
"createdTimestamp" : 1665688255982,
@@ -674,6 +714,26 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "f55135de-7341-459d-8a42-a59f52d05bed",
"createdTimestamp" : 1674148694958,
"username" : "infra.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "infra.sme@status.im",
"credentials" : [ {
"id" : "e1f4368c-ed7c-481c-9426-fc0b8f2bf520",
"type" : "password",
"createdDate" : 1674148695008,
"secretData" : "{\"value\":\"7RHwvrhGAA3EddNNjPaVah+EOg5be0eugiwLLQLGlhFGSdGfg6kiUmPr5wBqBabivXHiSZgv/BiaL5KQ/VmR+A==\",\"salt\":\"HW3yCxErwpKASPvHX8o9Uw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "1561518b-c327-491e-9db3-23c2b5394104",
"createdTimestamp" : 1669303773974,
@@ -843,6 +903,46 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "530e99cb-b400-4baf-8ca6-22e64a30ef84",
"createdTimestamp" : 1674148694688,
"username" : "legal.lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal.lead@status.im",
"credentials" : [ {
"id" : "81f3aeca-8316-4a1b-8eb9-2570c062d0df",
"type" : "password",
"createdDate" : 1674148694733,
"secretData" : "{\"value\":\"puCrVcCNrO6P0VF8w0ZSx97RHi/c6NCuSeTidk/tEfSpZyY9x0oz/bkdFJO359HuvhN5HMBQ+CKPNbW1VjOSoA==\",\"salt\":\"ZczpeV+0QJGZG96EfLWYRQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "2a3176a0-8dd5-4223-a3e1-3cac4134e474",
"createdTimestamp" : 1674148695030,
"username" : "legal.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal.sme@status.im",
"credentials" : [ {
"id" : "52fd8bd4-8fc4-4b71-8325-424220ef83af",
"type" : "password",
"createdDate" : 1674148695076,
"secretData" : "{\"value\":\"Rce1M5ph1ITsCguiHlv7YMcDTyofRnSPnOraQskkmeojV+tlUeBBsHV1fTiqJ4f13vE1qtnwC/60vQV8BprsHw==\",\"salt\":\"zFyJq5G2F/pZeLmgKaGoxQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "6f5bfa09-7494-4a2f-b871-cf327048cac7",
"createdTimestamp" : 1665517010600,
@@ -905,6 +1005,26 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "c3ea06ee-c497-48e6-8816-43c8ef68bd8b",
"createdTimestamp" : 1674148694747,
"username" : "program.lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "program.lead@status.im",
"credentials" : [ {
"id" : "393e3cd9-c403-41dd-8562-7edba6acedd3",
"type" : "password",
"createdDate" : 1674148694793,
"secretData" : "{\"value\":\"AD/rFDJcnQNVSZLVnLl6FzdiMSkRFiKiF2L6jyPtnAOAuQ6IivNvDIqiZf98rPuSq1zs8wjeDzFzyXvTYp7Pjg==\",\"salt\":\"T4XlF58M6LNTX8ksxYq8jQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "f3852a7d-8adf-494f-b39d-96ad4c899ee5",
"createdTimestamp" : 1665516926300,
@@ -925,6 +1045,26 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "74374cda-1516-48e5-9ef2-1fd7bcee84d3",
"createdTimestamp" : 1674148695088,
"username" : "security.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security.sme@status.im",
"credentials" : [ {
"id" : "43427e80-292e-453f-9968-511a1064729e",
"type" : "password",
"createdDate" : 1674148695133,
"secretData" : "{\"value\":\"HB68S1rm/fef2nY2qpakAyZ0a+OFM0G/Xp+kHNdTQSWZA6fYq8EUzhfTFkUQ5xuTriOesXao0srtFmcCs2Pi8Q==\",\"salt\":\"e8J1O8M7mrDq/jTJXzwYyQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "487d3a85-89dd-4839-957a-c3f6d70551f6",
"createdTimestamp" : 1657115173081,
@@ -961,6 +1101,26 @@
},
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "3d45bb85-0a2d-4b15-8a19-d26a5619d359",
"createdTimestamp" : 1674148694810,
"username" : "services.lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "services.lead@status.im",
"credentials" : [ {
"id" : "45607c53-3768-4f76-bda3-4d31b39ffccd",
"type" : "password",
"createdDate" : 1674148694884,
"secretData" : "{\"value\":\"E3GPcOLU56efhBQE7MMZa0OM0FAtgK5kDA9sy65uCwSyaoZGp4ZVUDsIfIkWe+TEEQA5QP5FVJbJhwvdkx3m9w==\",\"salt\":\"dySpiEZxeyb11oQZR2WYVQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
} ],
"scopeMappings" : [ {
"clientScope" : "offline_access",
@@ -2174,7 +2334,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@@ -2192,7 +2352,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
"allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@@ -2282,7 +2442,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
"id" : "76ae522e-7ab3-48dc-af76-9cb8069368a2",
"id" : "fd44ea2b-052b-470a-9afd-216390c40d54",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",
@@ -2304,7 +2464,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ddf80243-ec40-4c21-ae94-2967d841f84c",
"id" : "88a96abb-a839-4405-97bf-fa53f5290482",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",
@@ -2333,7 +2493,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "4f075680-46b7-49eb-b94c-d7425f105cb9",
"id" : "cbe05604-280f-4304-bda5-ed5245537f4d",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@@ -2355,7 +2515,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "a0467c77-c3dc-4df6-acd2-c05ca13601ed",
"id" : "5275913f-e597-4a89-b416-4f9412b9082b",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@@ -2377,7 +2537,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "07536fec-8d41-4c73-845f-ca85002022e0",
"id" : "a0afd432-ed89-41c6-be8d-f31834e80ba1",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@@ -2399,7 +2559,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "f123f912-71fb-4596-97f9-c0628a59413d",
"id" : "fab45b23-3353-4482-b690-07f3ab177776",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",
@@ -2421,7 +2581,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "03c26cc5-366b-462d-9297-b4016f8d7c57",
"id" : "f5eb0757-f2cd-4d4b-9608-d1b9ae4fd941",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",
@@ -2443,7 +2603,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "1b4f474e-aa64-45cc-90f1-63504585d89c",
"id" : "521586b9-ade0-4f8c-aff6-3d6c357aa6e4",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",
@@ -2466,7 +2626,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "38024dd6-daff-45de-8782-06b07b7bfa56",
"id" : "b21bb98a-9241-4484-966b-6f8294ba2186",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",
@@ -2488,7 +2648,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b7e30fca-e4ac-4886-a2e7-642fe2a27ee7",
"id" : "7ec2a1f6-37e7-444e-9376-dee7d442ec2f",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@@ -2524,7 +2684,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "92e3571d-ac3e-4e79-a391-5315954e866f",
"id" : "1bc2b251-bf69-40b1-ace2-e3be5037b910",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",
@@ -2560,7 +2720,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "5093dd2d-fe5d-4f41-a54d-03cd648d9b7f",
"id" : "12a854bd-4d8a-49eb-8be5-cfc9d25cba54",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",
@@ -2589,7 +2749,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "95d2f1ff-6907-47ce-a93c-db462fe04844",
"id" : "99ebf3a7-674e-4603-a0cf-8fe4c6dd4cfc",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",
@@ -2604,7 +2764,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "27405ee8-5730-419c-944c-a7c67edd91ce",
"id" : "a241b9b8-9c21-4a47-877a-5a6535678c90",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",
@@ -2627,7 +2787,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "fce6d926-3a99-40ee-b79e-cae84493dbd8",
"id" : "c9df7ad1-9b59-46ec-a85e-714fd682569c",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@@ -2649,7 +2809,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "75d93596-b7fb-4a2c-a780-e6a038e66fe9",
"id" : "14f21f85-2bcb-4ed6-aaab-1ee237da153f",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",
@@ -2671,7 +2831,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "04cdc1ac-c58d-4f8c-bc10-7d5e2bb99485",
"id" : "bc7e40c0-9172-496b-8db1-3ebc20065887",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",
@@ -2687,7 +2847,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "99593c1e-f2a5-4198-ad41-634694259110",
"id" : "ef97f42b-7f32-442c-ab4a-8cb6c873cf1f",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",
@@ -2723,7 +2883,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "7d53f026-b05e-4a9c-aba6-23b17826a4d4",
"id" : "1ee2b484-3836-466f-9f5b-bbf47abc5ad7",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",
@@ -2759,7 +2919,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "7ca17e64-f916-4d6c-91f0-815ec66f50e8",
"id" : "4918f32e-6780-4ddd-a1a2-c3ae9d8fa598",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@@ -2775,13 +2935,13 @@
} ]
} ],
"authenticatorConfig" : [ {
"id" : "9b71d817-b999-479d-97f8-07e39dd9e9fa",
"id" : "5479944f-6198-48df-8a18-4bc0caba5963",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
"id" : "f9f13ba1-6a17-436b-a80b-6ccc042f9fc2",
"id" : "fd9f571f-0d6e-4ece-a3e5-fffccc1e4fad",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"


@ -0,0 +1,9 @@
finance.lead@status.im
legal.lead@status.im
program.lead@status.im
services.lead@status.im
finance.sme@status.im
infra.sme@status.im
legal.sme@status.im
security.sme@status.im

poetry.lock generated

@@ -248,7 +248,7 @@ zstd = ["zstandard"]
[[package]]
name = "certifi"
version = "2022.9.24"
version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
@@ -462,21 +462,6 @@ toml = "*"
conda = ["pyyaml"]
pipenv = ["pipenv"]
[[package]]
name = "ecdsa"
version = "0.18.0"
description = "ECDSA cryptographic signature library (pure python)"
category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[package.dependencies]
six = ">=1.9.0"
[package.extras]
gmpy = ["gmpy"]
gmpy2 = ["gmpy2"]
[[package]]
name = "exceptiongroup"
version = "1.0.4"
@@ -654,7 +639,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"
resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1"
[[package]]
name = "Flask-Cors"
@@ -668,6 +653,22 @@ python-versions = "*"
Flask = ">=0.9"
Six = "*"
[[package]]
name = "flask-jwt-extended"
version = "4.4.4"
description = "Extended JWT integration with Flask"
category = "main"
optional = false
python-versions = ">=3.7,<4"
[package.dependencies]
Flask = ">=2.0,<3.0"
PyJWT = ">=2.0,<3.0"
Werkzeug = ">=0.14"
[package.extras]
asymmetric-crypto = ["cryptography (>=3.3.1)"]
[[package]]
name = "Flask-Mail"
version = "0.9.1"
@@ -1073,19 +1074,19 @@ python-versions = "*"
[[package]]
name = "mysql-connector-python"
version = "8.0.31"
version = "8.0.32"
description = "MySQL driver written in Python"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
protobuf = ">=3.11.0,<=3.20.1"
protobuf = ">=3.11.0,<=3.20.3"
[package.extras]
compression = ["lz4 (>=2.1.6,<=3.1.3)", "zstandard (>=0.12.0,<=0.15.2)"]
compression = ["lz4 (>=2.1.6,<=3.1.3)", "zstandard (>=0.12.0,<=0.19.0)"]
dns-srv = ["dnspython (>=1.16.0,<=2.1.0)"]
gssapi = ["gssapi (>=1.6.9,<=1.8.1)"]
gssapi = ["gssapi (>=1.6.9,<=1.8.2)"]
[[package]]
name = "nodeenv"
@@ -1209,7 +1210,7 @@ wcwidth = "*"
[[package]]
name = "protobuf"
version = "3.20.1"
version = "3.20.3"
description = "Protocol Buffers"
category = "main"
optional = false
@@ -1223,14 +1224,6 @@ category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "pyasn1"
version = "0.4.8"
description = "ASN.1 types and codecs"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "pycodestyle"
version = "2.8.0"
@@ -1384,41 +1377,6 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
[package.dependencies]
six = ">=1.5"
[[package]]
name = "python-jose"
version = "3.3.0"
description = "JOSE implementation in Python"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
ecdsa = "!=0.15"
pyasn1 = "*"
rsa = "*"
[package.extras]
cryptography = ["cryptography (>=3.4.0)"]
pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"]
pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"]
[[package]]
name = "python-keycloak"
version = "2.6.0"
description = "python-keycloak is a Python package providing access to the Keycloak API."
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
[package.dependencies]
python-jose = ">=3.3.0,<4.0.0"
requests = ">=2.20.0,<3.0.0"
requests-toolbelt = ">=0.9.1,<0.10.0"
urllib3 = ">=1.26.0,<2.0.0"
[package.extras]
docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>=0.9.1,<0.10.0)", "m2r2 (>=0.3.2,<0.4.0)", "mock (>=4.0.3,<5.0.0)", "readthedocs-sphinx-ext (>=2.1.8,<3.0.0)", "recommonmark (>=0.7.1,<0.8.0)", "sphinx-autoapi (>=1.8.4,<2.0.0)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)"]
[[package]]
name = "pytz"
version = "2022.6"
@ -1494,17 +1452,6 @@ urllib3 = ">=1.21.1,<1.27"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-toolbelt"
version = "0.9.1"
description = "A utility belt for advanced users of python-requests"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
requests = ">=2.0.1,<3.0.0"
[[package]]
name = "restrictedpython"
version = "6.0"
@ -1528,17 +1475,6 @@ python-versions = "*"
[package.dependencies]
docutils = ">=0.11,<1.0"
[[package]]
name = "rsa"
version = "4.9"
description = "Pure-Python RSA implementation"
category = "main"
optional = false
python-versions = ">=3.6,<4"
[package.dependencies]
pyasn1 = ">=0.1.3"
[[package]]
name = "ruamel.yaml"
version = "0.17.21"
@ -1850,8 +1786,8 @@ lxml = "*"
[package.source]
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"
reference = "be26100bcbef8026e26312c665dae42faf476485"
resolved_reference = "be26100bcbef8026e26312c665dae42faf476485"
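
This lock entry reflects commit 74c46c9b4, which pins SpiffWorkflow to a known-passing revision; the matching pyproject.toml change can be produced with poetry (a sketch, not necessarily the exact command that was run):

poetry add "git+https://github.com/sartography/SpiffWorkflow.git#be26100bcbef8026e26312c665dae42faf476485"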
[[package]]
name = "SQLAlchemy"
@ -2222,7 +2158,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata]
lock-version = "1.1"
python-versions = ">=3.9,<3.12"
content-hash = "bbbd1c8bdce7f3dd7ec17c62b85dc7c95045fe500a759bb1a89c93add58a2a25"
content-hash = "d804b8cbb34882f92cf19e5e59231aa7eac84764298fe7eae72bd03112e09496"
[metadata.files]
alabaster = [
@ -2324,8 +2260,8 @@ celery = [
{file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"},
]
certifi = [
{file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
{file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
]
cfgv = [
{file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
@ -2443,10 +2379,6 @@ dparse = [
{file = "dparse-0.6.2-py3-none-any.whl", hash = "sha256:8097076f1dd26c377f30d4745e6ec18fef42f3bf493933b842ac5bafad8c345f"},
{file = "dparse-0.6.2.tar.gz", hash = "sha256:d45255bda21f998bc7ddf2afd5e62505ba6134756ba2d42a84c56b0826614dfe"},
]
ecdsa = [
{file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"},
{file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
]
exceptiongroup = [
{file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"},
{file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"},
@ -2494,6 +2426,10 @@ Flask-Cors = [
{file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"},
{file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"},
]
flask-jwt-extended = [
{file = "Flask-JWT-Extended-4.4.4.tar.gz", hash = "sha256:62b521d75494c290a646ae8acc77123721e4364790f1e64af0038d823961fbf0"},
{file = "Flask_JWT_Extended-4.4.4-py2.py3-none-any.whl", hash = "sha256:a85eebfa17c339a7260c4643475af444784ba6de5588adda67406f0a75599553"},
]
Flask-Mail = [
{file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"},
]
@ -2563,7 +2499,6 @@ greenlet = [
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
{file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
{file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},
@ -2572,7 +2507,6 @@ greenlet = [
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
{file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
{file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},
@ -2581,7 +2515,6 @@ greenlet = [
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
{file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
{file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
@ -2838,32 +2771,31 @@ mypy-extensions = [
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
mysql-connector-python = [
{file = "mysql-connector-python-8.0.31.tar.gz", hash = "sha256:0fbe8f5441ad781b4f65c54a10ac77c6a329591456607e042786528599519636"},
{file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e271d8de00d5e9f9bd4b212c8e23d2986dead0f20379010f3b274a3e24cbfcb"},
{file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f3ee04a601f9cb90ace9618bbe2fa8e5bb59be3eb0c2bd8a5405fe69e05e446b"},
{file = "mysql_connector_python-8.0.31-cp310-cp310-manylinux1_i686.whl", hash = "sha256:f89b7a731885b8a04248e4d8d124705ca836f0ddd3b7cf0c789e21f4b32810ed"},
{file = "mysql_connector_python-8.0.31-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:48eb34f4e69a2fba56f310de6682862a15d46cd2bd51ee6eebc3a244e4ee0aa6"},
{file = "mysql_connector_python-8.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:a570a72e0015b36b9c0775ae27c1d4946225f02f62129d16a14e9d77a38c0717"},
{file = "mysql_connector_python-8.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7ac859a52486ac319e37f61469bbb9023faef38018223efa74e953f1fe23d36"},
{file = "mysql_connector_python-8.0.31-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:79d6a6e8ce955df5ca0786cb8ed8fbd999745c9b50def89993a2a0f4732de721"},
{file = "mysql_connector_python-8.0.31-cp311-cp311-manylinux1_i686.whl", hash = "sha256:e60426af313dcd526028d018d70757a82c5cc0673776b2a614e2180b5970feed"},
{file = "mysql_connector_python-8.0.31-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:d0ca1ba3e5fb2f2cddcf271c320cd5c368f8d392c034ddab7a1c8dfd19510351"},
{file = "mysql_connector_python-8.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:a1d8c1509c740649f352400d50360185e5473371507bb6498ceda0c6e877920c"},
{file = "mysql_connector_python-8.0.31-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:447847396d1b51edd9cfe05a8c5ba82836d8ea4866f25f36a836cab322fdc4f0"},
{file = "mysql_connector_python-8.0.31-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5e01a2f50378c13407a32e40dd4d225cfee5996d9d11968f76720ec28aa45421"},
{file = "mysql_connector_python-8.0.31-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ac85883ec3b3a9a0e36cacc89b8f5e666206842c432a5f69b09a7687ddf51d4a"},
{file = "mysql_connector_python-8.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:28cb3667be64ebfbd3d477bbd2c71e50d48bd5ed7ba2072dd460ae886d27e88e"},
{file = "mysql_connector_python-8.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:30f4542d4d20357c79604e6bf1a801e71dfc45c759c22b502ca5aa8122c3e859"},
{file = "mysql_connector_python-8.0.31-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:e9e5ad544adfc82ffbda2c74685c8c953bce2e212c56f117020079f05e2c68b2"},
{file = "mysql_connector_python-8.0.31-cp38-cp38-manylinux1_i686.whl", hash = "sha256:744c976569e81eecce5e8c7e8f80df2a1c3f64414829addc69c64aef8f56d091"},
{file = "mysql_connector_python-8.0.31-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17d6ea22dacca7fa78a73a81f2b186d4c5c6e70b7be314e352526654e9ba4713"},
{file = "mysql_connector_python-8.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:ae1b3d03802474a161cce8a97024484d18bef43b86d20114908cbc263817cade"},
{file = "mysql_connector_python-8.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:746df133c677fbe4687da33aad5a711abdd9bd2277bbc350e20f903f07c81ef5"},
{file = "mysql_connector_python-8.0.31-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4d75e6c3a7f18004e8279cbd9f5edc70089d6aaf3cb64374e21098d9bf0b93c4"},
{file = "mysql_connector_python-8.0.31-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8ad0d08f3f7c9e48d6d102c7de718e5e44f630f916ff2f4b4ff8a3756b5d10ac"},
{file = "mysql_connector_python-8.0.31-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:02526f16eacc3961ff681c5c8455d2306a9b45124f2f012ca75a1eac9ceb5165"},
{file = "mysql_connector_python-8.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:b2bbf443f6346e46c26a3e91dd96a428a1038f2d3c5e466541078479c64a1833"},
{file = "mysql_connector_python-8.0.31-py2.py3-none-any.whl", hash = "sha256:9be9c4dcae987a2a3f07b2ad984984c24f90887dbfab3c8a971e631ad4ca5ccf"},
{file = "mysql-connector-python-8.0.32.tar.gz", hash = "sha256:c2d20b29fd096a0633f9360c275bd2434d4bcf597281991c4b7f1c820cd07b84"},
{file = "mysql_connector_python-8.0.32-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4df11c683924ef34c177a54887dc4844ae735b01c8a29ce6ab92d6d3db7a2757"},
{file = "mysql_connector_python-8.0.32-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4b2d00c9e2cb9e3d11c57ec411226f43aa627607085fbed661cfea1c4dc57f61"},
{file = "mysql_connector_python-8.0.32-cp310-cp310-manylinux1_i686.whl", hash = "sha256:992b7a464daa398e86df8c75f7d8cd6044f884ff9087e782120fc8beff96c638"},
{file = "mysql_connector_python-8.0.32-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:232095f0c36266510009b0f1214d2823a649efb8bc511dbab9ce8847f66ab08a"},
{file = "mysql_connector_python-8.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:fd233c83daaf048c1f9827be984c2721576ae0adf50e139429a06ccd094987d9"},
{file = "mysql_connector_python-8.0.32-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ab13dd6ede0e0e99ba97c73946462c3420625ab6e63fe13b6fc350e30eb3298d"},
{file = "mysql_connector_python-8.0.32-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:e722b6ffa5b0d7188eebac792b18bc871643db505bf60d0e6bd2859f31e5ed79"},
{file = "mysql_connector_python-8.0.32-cp311-cp311-manylinux1_i686.whl", hash = "sha256:283fe6f647e9d684feb1b7c48fa6a46b1e72c59ecdd6ea2b62392cd80c1a6701"},
{file = "mysql_connector_python-8.0.32-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:1c0a11f3ffbf850f2ca7b39e6c82021e8de910ddaeffd856e53dca028d21c923"},
{file = "mysql_connector_python-8.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:6cdba2779bcd16af0ceff0a6e50d33e6664a83f8d17d70524beb6f677a6d1fae"},
{file = "mysql_connector_python-8.0.32-cp37-cp37m-macosx_12_0_x86_64.whl", hash = "sha256:93b1eb3e07d19a23ccf2605d818aacee0d842b1820bbeef8d0022d8d3d014ab9"},
{file = "mysql_connector_python-8.0.32-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:d6b54656ca131a4f0f17b9d0adddc60f84fd982d64e06360026d5b06e5dbf865"},
{file = "mysql_connector_python-8.0.32-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8c5bfedc979d7858402f39c20d66a6cf03ca4c960732a98318126c278535ddb2"},
{file = "mysql_connector_python-8.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:bdd716b1e162fe4b3887f6617e9ddcfa659ba96a9ddb22feeae208a72f43d22f"},
{file = "mysql_connector_python-8.0.32-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:bd52a462759aa324a60054c4b44dc8b32007187a328f72be6b58f193d5e32a91"},
{file = "mysql_connector_python-8.0.32-cp38-cp38-manylinux1_i686.whl", hash = "sha256:be82357cc7e7e1377e2f4f8c18aa89c8aab6c0117155cf9fcf18e3cd0eb6ac8e"},
{file = "mysql_connector_python-8.0.32-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:1f399f3c2599d2591854cd0e0a24c7c399dff21ac5accb6e52e06924de29f3f4"},
{file = "mysql_connector_python-8.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:c8bba02501525e1fbbba094a6d8d391d1534e8be41be6396c3e1b9f7d9d13b1c"},
{file = "mysql_connector_python-8.0.32-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:145aeb75eefb7425e0a7fb36a4f95ebfe79e06be7c69a4045d34cde95c666dc4"},
{file = "mysql_connector_python-8.0.32-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:c990f4c0702d1739076261c4dece1042e1eb18bf34e0d8516d19ec5166a205ce"},
{file = "mysql_connector_python-8.0.32-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7f7a69db9e0c36764a6c65377f6174aee46e484520e48659e7aa674415b8e192"},
{file = "mysql_connector_python-8.0.32-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:677b5c6dcaec7e2a4bf95b991a869f4d371114f69a0d9a5bb236e988c8f4c376"},
{file = "mysql_connector_python-8.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:8c334c41cd1c5bcfa3550340253ef7d9d3b962211f33327c20f69706a0bcce06"},
{file = "mysql_connector_python-8.0.32-py2.py3-none-any.whl", hash = "sha256:e0299236297b63bf6cbb61d81a9d400bc01cad4743d1abe5296ef349de15ee53"},
]
nodeenv = [
{file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
@ -2880,7 +2812,10 @@ orjson = [
{file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
{file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
{file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
{file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
{file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
{file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
{file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
{file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
{file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
{file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},
@ -2950,30 +2885,28 @@ prompt-toolkit = [
{file = "prompt_toolkit-3.0.31.tar.gz", hash = "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148"},
]
protobuf = [
{file = "protobuf-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996"},
{file = "protobuf-3.20.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"},
{file = "protobuf-3.20.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde"},
{file = "protobuf-3.20.1-cp310-cp310-win32.whl", hash = "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c"},
{file = "protobuf-3.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7"},
{file = "protobuf-3.20.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153"},
{file = "protobuf-3.20.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f"},
{file = "protobuf-3.20.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20"},
{file = "protobuf-3.20.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531"},
{file = "protobuf-3.20.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e"},
{file = "protobuf-3.20.1-cp37-cp37m-win32.whl", hash = "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c"},
{file = "protobuf-3.20.1-cp37-cp37m-win_amd64.whl", hash = "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067"},
{file = "protobuf-3.20.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf"},
{file = "protobuf-3.20.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab"},
{file = "protobuf-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c"},
{file = "protobuf-3.20.1-cp38-cp38-win32.whl", hash = "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7"},
{file = "protobuf-3.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739"},
{file = "protobuf-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7"},
{file = "protobuf-3.20.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f"},
{file = "protobuf-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9"},
{file = "protobuf-3.20.1-cp39-cp39-win32.whl", hash = "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8"},
{file = "protobuf-3.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91"},
{file = "protobuf-3.20.1-py2.py3-none-any.whl", hash = "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388"},
{file = "protobuf-3.20.1.tar.gz", hash = "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9"},
{file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"},
{file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"},
{file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"},
{file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"},
{file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"},
{file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"},
{file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"},
{file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"},
{file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"},
{file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"},
{file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"},
{file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"},
{file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"},
{file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"},
{file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"},
{file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"},
{file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"},
{file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"},
{file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"},
{file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"},
{file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"},
{file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"},
]
psycopg2 = [
{file = "psycopg2-2.9.4-cp310-cp310-win32.whl", hash = "sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797"},
@ -2988,21 +2921,6 @@ psycopg2 = [
{file = "psycopg2-2.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308"},
{file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
]
pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [
{file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
{file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
@ -3070,14 +2988,6 @@ python-dateutil = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
python-jose = [
{file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"},
{file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"},
]
python-keycloak = [
{file = "python-keycloak-2.6.0.tar.gz", hash = "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96"},
{file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"},
]
pytz = [
{file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"},
{file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"},
@ -3216,10 +3126,6 @@ requests = [
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
requests-toolbelt = [
{file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"},
{file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
]
restrictedpython = [
{file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"},
{file = "RestrictedPython-6.0.tar.gz", hash = "sha256:405cf0bd9eec2f19b1326b5f48228efe56d6590b4e91826b8cc3b2cd400a96ad"},
@ -3227,10 +3133,6 @@ restrictedpython = [
restructuredtext-lint = [
{file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"},
]
rsa = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
]
"ruamel.yaml" = [
{file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
{file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},


@ -27,8 +27,9 @@ flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
#SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
# temporarily switch off main to fix CI because poetry export doesn't capture the revision if it's not here (it ignores the lock)
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "be26100bcbef8026e26312c665dae42faf476485"}
# SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
@ -44,7 +45,6 @@ marshmallow-enum = "^1.5.1"
marshmallow-sqlalchemy = "^0.28.0"
PyJWT = "^2.6.0"
gunicorn = "^20.1.0"
python-keycloak = "^2.5.0"
APScheduler = "*"
Jinja2 = "^3.1.2"
RestrictedPython = "^6.0"
@ -72,6 +72,7 @@ simplejson = "^3.17.6"
pytz = "^2022.6"
dateparser = "^1.1.2"
types-dateparser = "^1.1.4.1"
flask-jwt-extended = "^4.4.4"
[tool.poetry.dev-dependencies]
@ -79,7 +80,7 @@ pytest = "*"
coverage = {extras = ["toml"], version = "^6.1"}
safety = "^2.3.1"
mypy = ">=0.961"
typeguard = "^2.13.2"
typeguard = "^2"
xdoctest = {extras = ["colors"], version = "^1.0.1"}
sphinx = "^5.0.2"
sphinx-autobuild = ">=2021.3.14"


@ -23,6 +23,7 @@ from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_b
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
openid_blueprint,
)
from spiffworkflow_backend.routes.user import set_new_access_token_in_cookie
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.authorization_service import AuthorizationService
@ -115,7 +116,7 @@ def create_app() -> flask.app.Flask:
r"^https?:\/\/%s(.*)" % o.replace(".", r"\.")
for o in app.config["CORS_ALLOW_ORIGINS"]
]
CORS(app, origins=origins_re, max_age=3600)
CORS(app, origins=origins_re, max_age=3600, supports_credentials=True)
connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX)
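For context on the supports_credentials change above: flask-cors only emits the Access-Control-Allow-Credentials: true header when that flag is set, and browsers refuse to send or store cookies on cross-origin requests without it, so the token cookies introduced elsewhere in this changeset depend on it. A minimal standalone sketch (the origin regex is a placeholder, not the app's real config):

from flask import Flask
from flask_cors import CORS

app = Flask(__name__)
CORS(
    app,
    origins=[r"^https?:\/\/example\.com(.*)"],
    max_age=3600,
    supports_credentials=True,  # emits Access-Control-Allow-Credentials: true
)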
@ -124,13 +125,18 @@ def create_app() -> flask.app.Flask:
app.json = MyJSONEncoder(app)
if app.config["RUN_BACKGROUND_SCHEDULER"]:
# do not start the scheduler twice in flask debug mode
if (
app.config["RUN_BACKGROUND_SCHEDULER"]
and os.environ.get("WERKZEUG_RUN_MAIN") != "true"
):
start_scheduler(app)
configure_sentry(app)
app.before_request(verify_token)
app.before_request(AuthorizationService.check_for_permission)
app.after_request(set_new_access_token_in_cookie)
return app # type: ignore
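A note on the scheduler guard above: werkzeug's debug reloader runs the application twice, once in the watching process and once in a child it marks with WERKZEUG_RUN_MAIN=true, which is why create_app() would otherwise start two schedulers. A hedged sketch of the check in isolation:

import os

def scheduler_should_start(run_background_scheduler: bool) -> bool:
    # WERKZEUG_RUN_MAIN is set to "true" only in the reloader's child process,
    # so in flask debug mode this is True in exactly one of the two processes.
    return run_background_scheduler and os.environ.get("WERKZEUG_RUN_MAIN") != "true"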


@ -397,6 +397,12 @@ paths:
description: the modified process model id
schema:
type: string
- name: include_file_references
in: query
required: false
description: include all file references in the return
schema:
type: boolean
get:
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_show
summary: Returns a single process model
@ -628,6 +634,12 @@ paths:
description: The identifier of the group to get the process instances for
schema:
type: string
- name: process_initiator_username
in: query
required: false
description: The username of the process initiator
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list_for_me
summary: Returns a list of process instances that are associated with me.
@ -741,6 +753,12 @@ paths:
description: The identifier of the group to get the process instances for
schema:
type: string
- name: process_initiator_username
in: query
required: false
description: The username of the process initiator
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list
summary: Returns a list of process instances.
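A hypothetical client call exercising the new process_initiator_username parameter (host, port, and token are placeholders; the "results" key matches the response shapes used elsewhere in this changeset):

import requests

response = requests.get(
    "http://localhost:7000/v1.0/process-instances",
    params={"process_initiator_username": "alice", "page": 1, "per_page": 10},
    headers={"Authorization": "Bearer <access_token>"},
)
print(response.json()["results"])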
@ -1315,6 +1333,12 @@ paths:
/tasks:
parameters:
- name: process_instance_id
in: query
required: false
description: The process instance id to search by.
schema:
type: integer
- name: page
in: query
required: false
@ -1808,7 +1832,7 @@ paths:
post:
tags:
- Messages
operationId: spiffworkflow_backend.routes.messages_controller.message_start
operationId: spiffworkflow_backend.routes.messages_controller.message_send
summary: Instantiate and run a given process model with a message start event matching given identifier
requestBody:
content:


@ -63,7 +63,6 @@ def setup_config(app: Flask) -> None:
)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config.from_object("spiffworkflow_backend.config.default")
print("loaded config: default")
env_config_prefix = "spiffworkflow_backend.config."
if (
@ -71,7 +70,6 @@ def setup_config(app: Flask) -> None:
and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
):
load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
print("loaded config: terraform_deployed_environment")
env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
load_config_file(app, env_config_module)
@ -90,14 +88,6 @@ def setup_config(app: Flask) -> None:
"permissions",
app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
)
print(
"set permissions file name config:"
f" {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
)
print(
"set permissions file name full path:"
f" {app.config['PERMISSIONS_FILE_FULLPATH']}"
)
# unversioned (see .gitignore) config that can override everything and include secrets.
# src/spiffworkflow_backend/config/secrets.py


@ -29,8 +29,11 @@ CONNECTOR_PROXY_URL = environ.get(
# Open ID server
OPEN_ID_SERVER_URL = environ.get(
"OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow"
"OPEN_ID_SERVER_URL",
default="http://localhost:7002/realms/spiffworkflow"
# "OPEN_ID_SERVER_URL", default="http://localhost:7000/openid"
)
# Replace above line with this to use the built-in Open ID Server.
# OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7000/openid")
OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend")
@ -74,3 +77,7 @@ SPIFF_DATABASE_TYPE = environ.get(
SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None
)
SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get(
"SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID",
default="Message_SystemMessageNotification",
)
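Like the other settings in this module, the message id can be overridden through the environment; a hypothetical override for local testing (the identifier is made up):

import os

# must be set before setup_config() loads this module
os.environ["SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID"] = "Message_MyNotification"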


@ -10,7 +10,7 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
)
RUN_BACKGROUND_SCHEDULER = (
environ.get("RUN_BACKGROUND_SCHEDULER", default="true") == "true"
environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
)
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
"GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"


@ -106,6 +106,11 @@ permissions:
users: []
allowed_permissions: [create, read, update, delete]
uri: /process-instances/reports/*
read-process-instances-find-by-id:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-instances/find-by-id/*
processes-read:
groups: [everybody]
users: []


@ -98,12 +98,14 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"status": self.status,
"start_in_seconds": self.start_in_seconds,
"end_in_seconds": self.end_in_seconds,
"created_at_in_seconds": self.created_at_in_seconds,
"updated_at_in_seconds": self.updated_at_in_seconds,
"process_initiator_id": self.process_initiator_id,
"bpmn_xml_file_contents": self.bpmn_xml_file_contents,
"bpmn_version_control_identifier": self.bpmn_version_control_identifier,
"bpmn_version_control_type": self.bpmn_version_control_type,
"spiff_step": self.spiff_step,
"username": self.process_initiator.username,
"process_initiator_username": self.process_initiator.username,
}
@property


@ -54,6 +54,9 @@ class ProcessModelInfo:
return False
# for use with os.path.join so it can work on Windows
# NOTE: in APIs, ids should always have forward slashes, even on Windows.
# This is because we have to store ids in the database, and we want the same
# database snapshot to work on any OS.
def id_for_file_path(self) -> str:
"""Id_for_file_path."""
return self.id.replace("/", os.sep)
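A worked example of the note above (values are illustrative): ids keep forward slashes everywhere in the API and database, and only get translated at the filesystem boundary.

import os

process_model_id = "finance/invoice-approval"  # as stored in the db and used in APIs
file_path_id = process_model_id.replace("/", os.sep)
# on linux/mac this stays "finance/invoice-approval"; on Windows it becomes
# "finance\\invoice-approval", suitable for os.path.join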


@ -1,13 +1,11 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import flask.wrappers
from flask import make_response
from flask.wrappers import Response
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
def status() -> flask.wrappers.Response:
def status() -> Response:
"""Status."""
ProcessInstanceModel.query.filter().first()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
return make_response({"ok": True}, 200)


@ -19,6 +19,7 @@ from spiffworkflow_backend.models.message_triggerable_process_model import (
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.routes.process_api_blueprint import (
_find_process_instance_by_id_or_raise,
)
@ -90,7 +91,7 @@ def message_instance_list(
# payload: dict,
# process_instance_id: Optional[int],
# }
def message_start(
def message_send(
message_identifier: str,
body: Dict[str, Any],
) -> flask.wrappers.Response:
@ -121,6 +122,26 @@ def message_start(
body["process_instance_id"]
)
if process_instance.status == ProcessInstanceStatus.suspended.value:
raise ApiError(
error_code="process_instance_is_suspended",
message=(
f"Process Instance '{process_instance.id}' is suspended and cannot"
" accept messages."
),
status_code=400,
)
if process_instance.status == ProcessInstanceStatus.terminated.value:
raise ApiError(
error_code="process_instance_is_terminated",
message=(
f"Process Instance '{process_instance.id}' is terminated and cannot"
" accept messages."
),
status_code=400,
)
message_instance = MessageInstanceModel.query.filter_by(
process_instance_id=process_instance.id,
message_model_id=message_model.id,


@ -20,11 +20,29 @@ from spiffworkflow_backend.routes.process_api_blueprint import (
_un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.process_model_service import (
ProcessModelWithInstancesNotDeletableError,
)
def process_group_create(body: dict) -> flask.wrappers.Response:
"""Add_process_group."""
process_group = ProcessGroup(**body)
if ProcessModelService.is_process_model_identifier(process_group.id):
raise ApiError(
error_code="process_model_with_id_already_exists",
message=f"Process Model with given id already exists: {process_group.id}",
status_code=400,
)
if ProcessModelService.is_process_group_identifier(process_group.id):
raise ApiError(
error_code="process_group_with_id_already_exists",
message=f"Process Group with given id already exists: {process_group.id}",
status_code=400,
)
ProcessModelService.add_process_group(process_group)
_commit_and_push_to_git(
f"User: {g.user.username} added process group {process_group.id}"
@ -35,7 +53,16 @@ def process_group_create(body: dict) -> flask.wrappers.Response:
def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
"""Process_group_delete."""
process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
try:
ProcessModelService().process_group_delete(process_group_id)
except ProcessModelWithInstancesNotDeletableError as exception:
raise ApiError(
error_code="existing_instances",
message=str(exception),
status_code=400,
) from exception
_commit_and_push_to_git(
f"User: {g.user.username} deleted process group {process_group_id}"
)
@ -54,6 +81,13 @@ def process_group_update(
}
process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
if not ProcessModelService.is_process_group_identifier(process_group_id):
raise ApiError(
error_code="process_group_does_not_exist",
message=f"Process Group with given id does not exist: {process_group_id}",
status_code=400,
)
process_group = ProcessGroup(id=process_group_id, **body_filtered)
ProcessModelService.update_process_group(process_group)
_commit_and_push_to_git(
@ -88,7 +122,7 @@ def process_group_list(
"pages": pages,
},
}
return Response(json.dumps(response_json), status=200, mimetype="application/json")
return make_response(jsonify(response_json), 200)
def process_group_show(


@ -181,7 +181,20 @@ def process_instance_log_list(
SpiffLoggingModel.process_instance_id == process_instance.id
)
if not detailed:
log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore
log_query = log_query.filter(
# this was the previous implementation, where we only show completed tasks and skipped tasks.
# maybe we want to iterate on this in the future (in a third tab under process instance logs?)
# or_(
# SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore
# SpiffLoggingModel.message.like("Skipped task %"), # type: ignore
# )
and_(
SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore
SpiffLoggingModel.bpmn_task_type.in_( # type: ignore
["Default Throwing Event", "End Event", "Default Start Event"]
),
)
)
logs = (
log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
@ -219,6 +232,7 @@ def process_instance_list_for_me(
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
user_group_identifier: Optional[str] = None,
process_initiator_username: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list_for_me."""
return process_instance_list(
@ -252,6 +266,7 @@ def process_instance_list(
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
user_group_identifier: Optional[str] = None,
process_initiator_username: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list."""
process_instance_report = ProcessInstanceReportService.report_with_identifier(
@ -268,6 +283,7 @@ def process_instance_list(
end_to=end_to,
with_relation_to_me=with_relation_to_me,
process_status=process_status.split(",") if process_status else None,
process_initiator_username=process_initiator_username,
)
else:
report_filter = (
@ -281,6 +297,7 @@ def process_instance_list(
end_to=end_to,
process_status=process_status,
with_relation_to_me=with_relation_to_me,
process_initiator_username=process_initiator_username,
)
)
@ -547,7 +564,13 @@ def process_instance_task_list(
else:
spiff_tasks = processor.get_all_user_tasks()
subprocesses_by_child_task_ids = processor.get_subprocesses_by_child_task_ids()
subprocesses_by_child_task_ids, task_typename_by_task_id = (
processor.get_subprocesses_by_child_task_ids()
)
processor.get_highest_level_calling_subprocesses_by_child_task_ids(
subprocesses_by_child_task_ids, task_typename_by_task_id
)
tasks = []
for spiff_task in spiff_tasks:
calling_subprocess_task_id = subprocesses_by_child_task_ids.get(


@ -15,6 +15,7 @@ from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from werkzeug.datastructures import FileStorage
from spiffworkflow_backend.interfaces import IdToProcessGroupMapping
from spiffworkflow_backend.models.file import FileSchema
@ -31,15 +32,24 @@ from spiffworkflow_backend.routes.process_api_blueprint import (
)
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.git_service import MissingGitConfigsError
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportNotFoundError,
)
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.process_model_service import (
ProcessModelWithInstancesNotDeletableError,
)
from spiffworkflow_backend.services.spec_file_service import (
ProcessModelFileInvalidError,
)
from spiffworkflow_backend.services.spec_file_service import SpecFileService
def process_model_create(
modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
modified_process_group_id: str, body: Dict[str, Union[str, bool, int, None, list]]
) -> flask.wrappers.Response:
"""Process_model_create."""
body_include_list = [
@ -49,6 +59,8 @@ def process_model_create(
"primary_process_id",
"description",
"metadata_extraction_paths",
"fault_or_suspend_on_exception",
"exception_notification_addresses",
]
body_filtered = {
include_item: body[include_item]
@ -66,6 +78,24 @@ def process_model_create(
status_code=400,
)
if ProcessModelService.is_process_model_identifier(process_model_info.id):
raise ApiError(
error_code="process_model_with_id_already_exists",
message=(
f"Process Model with given id already exists: {process_model_info.id}"
),
status_code=400,
)
if ProcessModelService.is_process_group_identifier(process_model_info.id):
raise ApiError(
error_code="process_group_with_id_already_exists",
message=(
f"Process Group with given id already exists: {process_model_info.id}"
),
status_code=400,
)
ProcessModelService.add_process_model(process_model_info)
_commit_and_push_to_git(
f"User: {g.user.username} created process model {process_model_info.id}"
@ -82,7 +112,15 @@ def process_model_delete(
) -> flask.wrappers.Response:
"""Process_model_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
try:
ProcessModelService().process_model_delete(process_model_identifier)
except ProcessModelWithInstancesNotDeletableError as exception:
raise ApiError(
error_code="existing_instances",
message=str(exception),
status_code=400,
) from exception
_commit_and_push_to_git(
f"User: {g.user.username} deleted process model {process_model_identifier}"
)
@ -90,7 +128,8 @@ def process_model_delete(
def process_model_update(
modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
modified_process_model_identifier: str,
body: Dict[str, Union[str, bool, int, None, list]],
) -> Any:
"""Process_model_update."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
@ -100,6 +139,8 @@ def process_model_update(
"primary_process_id",
"description",
"metadata_extraction_paths",
"fault_or_suspend_on_exception",
"exception_notification_addresses",
]
body_filtered = {
include_item: body[include_item]
@ -115,7 +156,9 @@ def process_model_update(
return ProcessModelInfoSchema().dump(process_model)
def process_model_show(modified_process_model_identifier: str) -> Any:
def process_model_show(
modified_process_model_identifier: str, include_file_references: bool = False
) -> Any:
"""Process_model_show."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
@ -124,8 +167,12 @@ def process_model_show(modified_process_model_identifier: str) -> Any:
key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
)
process_model.files = files
if include_file_references:
for file in process_model.files:
file.references = SpecFileService.get_references_for_file(file, process_model)
file.references = SpecFileService.get_references_for_file(
file, process_model
)
process_model.parent_groups = ProcessModelService.get_parent_group_array(
process_model.id
@ -218,26 +265,11 @@ def process_model_file_update(
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_update."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
request_file = _get_file_from_request()
request_file_contents = request_file.stream.read()
if not request_file_contents:
raise ApiError(
error_code="file_contents_empty",
message="Given request file does not have any content",
status_code=400,
message = f"User: {g.user.username} clicked save for"
return _create_or_update_process_model_file(
modified_process_model_identifier, message, 200
)
SpecFileService.update_file(process_model, file_name, request_file_contents)
_commit_and_push_to_git(
f"User: {g.user.username} clicked save for"
f" {process_model_identifier}/{file_name}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_model_file_delete(
modified_process_model_identifier: str, file_name: str
@ -267,28 +299,9 @@ def process_model_file_create(
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_model_file_create."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
request_file = _get_file_from_request()
if not request_file.filename:
raise ApiError(
error_code="could_not_get_filename",
message="Could not get filename from request",
status_code=400,
)
file = SpecFileService.add_file(
process_model, request_file.filename, request_file.stream.read()
)
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
_commit_and_push_to_git(
f"User: {g.user.username} added process model file"
f" {process_model_identifier}/{file.name}"
)
return Response(
json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
message = f"User: {g.user.username} added process model file"
return _create_or_update_process_model_file(
modified_process_model_identifier, message, 201
)
@ -437,6 +450,10 @@ def process_model_create_with_natural_language(
default_report_metadata = ProcessInstanceReportService.system_metadata_map(
"default"
)
if default_report_metadata is None:
raise ProcessInstanceReportNotFoundError(
"Could not find a report with identifier 'default'"
)
for column in columns:
default_report_metadata["columns"].append(
{"Header": column, "accessor": column, "filterable": True}
@ -454,9 +471,9 @@ def process_model_create_with_natural_language(
)
def _get_file_from_request() -> Any:
def _get_file_from_request() -> FileStorage:
"""Get_file_from_request."""
request_file = connexion.request.files.get("file")
request_file: FileStorage = connexion.request.files.get("file")
if not request_file:
raise ApiError(
error_code="no_file_given",
@ -494,3 +511,58 @@ def _get_process_group_from_modified_identifier(
status_code=400,
)
return process_group
def _create_or_update_process_model_file(
modified_process_model_identifier: str,
message_for_git_commit: str,
http_status_to_return: int,
) -> flask.wrappers.Response:
"""_create_or_update_process_model_file."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
request_file = _get_file_from_request()
# for mypy
request_file_contents = request_file.stream.read()
if not request_file_contents:
raise ApiError(
error_code="file_contents_empty",
message="Given request file does not have any content",
status_code=400,
)
if not request_file.filename:
raise ApiError(
error_code="could_not_get_filename",
message="Could not get filename from request",
status_code=400,
)
file = None
try:
file = SpecFileService.update_file(
process_model, request_file.filename, request_file_contents
)
except ProcessModelFileInvalidError as exception:
raise (
ApiError(
error_code="process_model_file_invalid",
message=(
f"Invalid process model file cannot be saved: {request_file.name}."
f" Received error: {str(exception)}"
),
status_code=400,
)
) from exception
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
_commit_and_push_to_git(
f"{message_for_git_commit} {process_model_identifier}/{file.name}"
)
return Response(
json.dumps(FileSchema().dump(file)),
status=http_status_to_return,
mimetype="application/json",
)


@ -49,7 +49,7 @@ def script_unit_test_create(
# TODO: move this to an xml service or something
file_contents = SpecFileService.get_data(process_model, file.name)
bpmn_etree_element = etree.fromstring(file_contents)
bpmn_etree_element = SpecFileService.get_etree_from_xml_bytes(file_contents)
nsmap = bpmn_etree_element.nsmap
spiff_element_maker = ElementMaker(
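The switch to SpecFileService.get_etree_from_xml_bytes presumably routes all XML parsing through one hardened helper; only the name comes from this diff, but a sketch of what such a helper likely does is disable entity resolution so crafted doctypes cannot read local files (XXE):

from lxml import etree

def get_etree_from_xml_bytes_sketch(binary_data: bytes) -> etree._Element:
    # resolve_entities=False stops lxml from expanding external/internal entities
    parser = etree.XMLParser(resolve_entities=False)
    return etree.fromstring(binary_data, parser=parser)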


@ -1,7 +1,6 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Dict
from typing import Optional
from flask import g
from flask import jsonify
@ -15,9 +14,10 @@ from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.user_service import UserService
def secret_show(key: str) -> Optional[str]:
def secret_show(key: str) -> Response:
"""Secret_show."""
return SecretService.get_secret(key)
secret = SecretService.get_secret(key)
return make_response(jsonify(secret), 200)
def secret_list(


@ -10,6 +10,7 @@ from typing import Union
import flask.wrappers
import jinja2
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
@ -23,6 +24,7 @@ from sqlalchemy import asc
from sqlalchemy import desc
from sqlalchemy import func
from sqlalchemy.orm import aliased
from sqlalchemy.orm.util import AliasedClass
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
@ -67,35 +69,64 @@ class ReactJsonSchemaSelectOption(TypedDict):
# TODO: see comment for before_request
# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
def task_list_my_tasks(
process_instance_id: Optional[int] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Task_list_my_tasks."""
principal = _find_principal_or_raise()
human_tasks = (
assigned_user = aliased(UserModel)
process_initiator_user = aliased(UserModel)
human_task_query = (
HumanTaskModel.query.order_by(desc(HumanTaskModel.id)) # type: ignore
.join(ProcessInstanceModel)
.join(HumanTaskUserModel)
.filter_by(user_id=principal.user_id)
.group_by(HumanTaskModel.id)
.join(
ProcessInstanceModel,
ProcessInstanceModel.id == HumanTaskModel.process_instance_id,
)
.join(
process_initiator_user,
process_initiator_user.id == ProcessInstanceModel.process_initiator_id,
)
.join(HumanTaskUserModel, HumanTaskUserModel.human_task_id == HumanTaskModel.id)
.filter(HumanTaskUserModel.user_id == principal.user_id)
.outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
.filter(HumanTaskModel.completed == False) # noqa: E712
# just need this add_columns to add the process_model_identifier. Then add everything back that was removed.
.add_columns(
ProcessInstanceModel.process_model_identifier,
ProcessInstanceModel.process_model_display_name,
ProcessInstanceModel.status,
.outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
)
if process_instance_id is not None:
human_task_query = human_task_query.filter(
ProcessInstanceModel.id == process_instance_id
)
potential_owner_usernames_from_group_concat_or_similar = (
_get_potential_owner_usernames(assigned_user)
)
human_tasks = human_task_query.add_columns(
HumanTaskModel.task_id.label("id"), # type: ignore
HumanTaskModel.task_name,
HumanTaskModel.task_title,
HumanTaskModel.task_type,
HumanTaskModel.task_status,
HumanTaskModel.task_id,
HumanTaskModel.id,
HumanTaskModel.process_model_display_name,
HumanTaskModel.process_instance_id,
)
.paginate(page=page, per_page=per_page, error_out=False)
)
tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items]
ProcessInstanceModel.process_model_identifier,
ProcessInstanceModel.status.label("process_instance_status"), # type: ignore
ProcessInstanceModel.updated_at_in_seconds,
ProcessInstanceModel.created_at_in_seconds,
process_initiator_user.username.label("process_initiator_username"),
GroupModel.identifier.label("assigned_user_group_identifier"),
# func.max does not seem to return the columns on its own, so we add both the
# plain columns and their func.max versions
func.max(ProcessInstanceModel.process_model_identifier),
func.max(ProcessInstanceModel.status.label("process_instance_status")), # type: ignore
func.max(ProcessInstanceModel.updated_at_in_seconds),
func.max(ProcessInstanceModel.created_at_in_seconds),
func.max(process_initiator_user.username.label("process_initiator_username")),
func.max(GroupModel.identifier.label("assigned_user_group_identifier")),
potential_owner_usernames_from_group_concat_or_similar,
).paginate(page=page, per_page=per_page, error_out=False)
response_json = {
"results": tasks,
"results": human_tasks.items,
"pagination": {
"count": len(human_tasks.items),
"total": human_tasks.total,
@ -416,6 +447,7 @@ def _get_tasks(
HumanTaskModel.id == HumanTaskUserModel.human_task_id,
),
)
if has_lane_assignment_id:
if user_group_identifier:
human_tasks_query = human_tasks_query.filter(
@ -428,6 +460,10 @@ def _get_tasks(
else:
human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore
potential_owner_usernames_from_group_concat_or_similar = (
_get_potential_owner_usernames(assigned_user)
)
human_tasks = (
human_tasks_query.add_columns(
ProcessInstanceModel.process_model_identifier,
@ -440,9 +476,7 @@ def _get_tasks(
HumanTaskModel.task_title,
HumanTaskModel.process_model_display_name,
HumanTaskModel.process_instance_id,
func.group_concat(assigned_user.username.distinct()).label(
"potential_owner_usernames"
),
potential_owner_usernames_from_group_concat_or_similar,
)
.order_by(desc(HumanTaskModel.id)) # type: ignore
.paginate(page=page, per_page=per_page, error_out=False)
@ -561,3 +595,18 @@ def _update_form_schema_with_task_data_as_needed(
for o in value:
if isinstance(o, dict):
_update_form_schema_with_task_data_as_needed(o, task_data)
def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
"""_get_potential_owner_usernames."""
potential_owner_usernames_from_group_concat_or_similar = func.group_concat(
assigned_user.username.distinct()
).label("potential_owner_usernames")
db_type = current_app.config.get("SPIFF_DATABASE_TYPE")
if db_type == "postgres":
potential_owner_usernames_from_group_concat_or_similar = func.string_agg(
assigned_user.username.distinct(), ", "
).label("potential_owner_usernames")
return potential_owner_usernames_from_group_concat_or_similar
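The dialect switch above exists because the string-aggregation function is spelled differently per database; a standalone sketch (not from the diff) showing roughly what each branch compiles to:

from sqlalchemy import column, func
from sqlalchemy.dialects import mysql, postgresql

username = column("username")
print(func.group_concat(username.distinct()).compile(dialect=mysql.dialect()))
# roughly: group_concat(DISTINCT username)
print(func.string_agg(username.distinct(), ", ").compile(dialect=postgresql.dialect()))
# roughly: string_agg(DISTINCT username, :param) with ", " bound as the separator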


@ -2,11 +2,13 @@
import ast
import base64
import json
import re
from typing import Any
from typing import Dict
from typing import Optional
from typing import Union
import flask
import jwt
from flask import current_app
from flask import g
@ -20,6 +22,7 @@ from spiffworkflow_backend.services.authentication_service import Authentication
from spiffworkflow_backend.services.authentication_service import (
MissingAccessTokenError,
)
from spiffworkflow_backend.services.authentication_service import TokenExpiredError
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.user_service import UserService
@ -55,6 +58,9 @@ def verify_token(
if not token and "Authorization" in request.headers:
token = request.headers["Authorization"].removeprefix("Bearer ")
# This should never be set here but just in case
_clear_auth_tokens_from_thread_local_data()
if token:
user_model = None
decoded_token = get_decoded_token(token)
@ -71,12 +77,11 @@ def verify_token(
f" internal token. {e}"
)
elif "iss" in decoded_token.keys():
user_info = None
try:
if AuthenticationService.validate_id_token(token):
if AuthenticationService.validate_id_or_access_token(token):
user_info = decoded_token
except (
ApiError
) as ae: # API Error is only thrown in the token is outdated.
except TokenExpiredError as token_expired_error:
# Try to refresh the token
user = UserService.get_user_by_service_and_service_id(
decoded_token["iss"], decoded_token["sub"]
@ -90,17 +95,24 @@ def verify_token(
)
)
if auth_token and "error" not in auth_token:
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.new_access_token = auth_token["access_token"]
tld.new_id_token = auth_token["id_token"]
# We have the user, but this code is a bit convoluted, and will later demand
# a user_info object so it can look up the user. Sorry to leave this crap here.
user_info = {"sub": user.service_id}
else:
raise ae
else:
raise ae
else:
raise ae
user_info = {
"sub": user.service_id,
"iss": user.service,
}
if user_info is None:
raise ApiError(
error_code="invalid_token",
message="Your token has expired. Please log in.",
status_code=401,
) from token_expired_error
except Exception as e:
current_app.logger.error(f"Exception raised in get_token: {e}")
raise ApiError(
error_code="fail_get_user_info",
message="Cannot get user info from token",
@ -150,8 +162,6 @@ def verify_token(
g.token = token
get_scope(token)
return None
# return {"uid": g.user.id, "sub": g.user.id, "scope": scope}
# return validate_scope(token, user_info, user_model)
else:
raise ApiError(error_code="no_user_id", message="Cannot get a user id")
@ -160,16 +170,44 @@ def verify_token(
)
def validate_scope(token: Any) -> bool:
"""Validate_scope."""
print("validate_scope")
# token = AuthenticationService.refresh_token(token)
# user_info = AuthenticationService.get_user_info_from_public_access_token(token)
# bearer_token = AuthenticationService.get_bearer_token(token)
# permission = AuthenticationService.get_permission_by_basic_token(token)
# permissions = AuthenticationService.get_permissions_by_token_for_resource_and_scope(token)
# introspection = AuthenticationService.introspect_token(basic_token)
return True
def set_new_access_token_in_cookie(
response: flask.wrappers.Response,
) -> flask.wrappers.Response:
"""Checks if a new token has been set in THREAD_LOCAL_DATA and sets cookies if appropriate.
It will also delete the cookies if the user has logged out.
"""
tld = current_app.config["THREAD_LOCAL_DATA"]
domain_for_frontend_cookie: Optional[str] = re.sub(
r"^https?:\/\/", "", current_app.config["SPIFFWORKFLOW_FRONTEND_URL"]
)
if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith(
"localhost"
):
domain_for_frontend_cookie = None
if hasattr(tld, "new_access_token") and tld.new_access_token:
response.set_cookie(
"access_token", tld.new_access_token, domain=domain_for_frontend_cookie
)
# id_token is required for logging out since this gets passed back to the openid server
if hasattr(tld, "new_id_token") and tld.new_id_token:
response.set_cookie(
"id_token", tld.new_id_token, domain=domain_for_frontend_cookie
)
if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out:
response.set_cookie(
"id_token", "", max_age=0, domain=domain_for_frontend_cookie
)
response.set_cookie(
"access_token", "", max_age=0, domain=domain_for_frontend_cookie
)
_clear_auth_tokens_from_thread_local_data()
return response
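A worked example of the domain derivation above (URLs are hypothetical): stripping the scheme yields a value usable as the cookie's Domain attribute, and localhost is mapped to None so the browser scopes the cookie to the host that set it.

import re

for url in ["https://frontend.example.com", "http://localhost:7001"]:
    domain = re.sub(r"^https?:\/\/", "", url)
    if domain.startswith("localhost"):
        domain = None
    print(url, "->", domain)
# https://frontend.example.com -> frontend.example.com
# http://localhost:7001 -> None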
def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
@ -226,7 +264,7 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response
user_info = parse_id_token(id_token)
if AuthenticationService.validate_id_token(id_token):
if AuthenticationService.validate_id_or_access_token(id_token):
if user_info and "error" not in user_info:
user_model = AuthorizationService.create_user_from_sign_in(user_info)
g.user = user_model.id
@ -234,11 +272,10 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response
AuthenticationService.store_refresh_token(
user_model.id, auth_token_object["refresh_token"]
)
redirect_url = (
f"{state_redirect_url}?"
+ f"access_token={auth_token_object['access_token']}&"
+ f"id_token={id_token}"
)
redirect_url = state_redirect_url
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.new_access_token = auth_token_object["access_token"]
tld.new_id_token = auth_token_object["id_token"]
return redirect(redirect_url)
raise ApiError(
@ -266,7 +303,6 @@ def login_api() -> Response:
def login_api_return(code: str, state: str, session_state: str) -> str:
"""Login_api_return."""
state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
state_dict["redirect_url"]
@ -284,6 +320,8 @@ def logout(id_token: str, redirect_url: Optional[str]) -> Response:
"""Logout."""
if redirect_url is None:
redirect_url = ""
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.user_has_logged_out = True
return AuthenticationService().logout(redirect_url=redirect_url, id_token=id_token)
@ -312,15 +350,6 @@ def get_decoded_token(token: str) -> Optional[Dict]:
error_code="unknown_token",
message="Unknown token type in get_decoded_token",
)
# try:
# # see if we have an open_id token
# decoded_token = AuthorizationService.decode_auth_token(token)
# else:
# if 'sub' in decoded_token and 'iss' in decoded_token and 'aud' in decoded_token:
# token_type = 'id_token'
# if 'token_type' in decoded_token and 'sub' in decoded_token:
# return True
def get_scope(token: str) -> str:
@ -347,3 +376,14 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
return user
user = UserService.create_user(service_id, service, service_id)
return user
def _clear_auth_tokens_from_thread_local_data() -> None:
"""_clear_auth_tokens_from_thread_local_data."""
tld = current_app.config["THREAD_LOCAL_DATA"]
if hasattr(tld, "new_access_token"):
delattr(tld, "new_access_token")
if hasattr(tld, "new_id_token"):
delattr(tld, "new_id_token")
if hasattr(tld, "user_has_logged_out"):
delattr(tld, "user_has_logged_out")
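THREAD_LOCAL_DATA is assumed here to be a threading.local stored on the app config (it is set up outside this diff); the hasattr/delattr pattern works because its attributes exist per thread and must be cleared before a worker thread handles its next request:

import threading

tld = threading.local()
tld.new_access_token = "eyJ..."  # set while handling one request
if hasattr(tld, "new_access_token"):
    delattr(tld, "new_access_token")  # cleared so the next request starts clean
assert not hasattr(tld, "new_access_token")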


@ -20,6 +20,15 @@ class MissingAccessTokenError(Exception):
"""MissingAccessTokenError."""
# These could be either 'id' OR 'access' tokens and we can't always know which
class TokenExpiredError(Exception):
"""TokenExpiredError."""
class TokenInvalidError(Exception):
"""TokenInvalidError."""
class AuthenticationProviderTypes(enum.Enum):
"""AuthenticationServiceProviders."""
@ -125,18 +134,15 @@ class AuthenticationService:
return auth_token_object
@classmethod
def validate_id_token(cls, id_token: str) -> bool:
def validate_id_or_access_token(cls, token: str) -> bool:
"""Https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation."""
valid = True
now = time.time()
try:
decoded_token = jwt.decode(id_token, options={"verify_signature": False})
decoded_token = jwt.decode(token, options={"verify_signature": False})
except Exception as e:
raise ApiError(
error_code="bad_id_token",
message="Cannot decode id_token",
status_code=401,
) from e
raise TokenInvalidError("Cannot decode token") from e
if decoded_token["iss"] != cls.server_url():
valid = False
elif (
@ -153,15 +159,10 @@ class AuthenticationService:
valid = False
if not valid:
current_app.logger.error(f"Invalid token in validate_id_or_access_token: {token}")
return False
if now > decoded_token["exp"]:
raise ApiError(
error_code="invalid_token",
message="Your token is expired. Please Login",
status_code=401,
)
raise TokenExpiredError("Your token has expired. Please log in.")
return True
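For reference, a minimal sketch of the unverified decode this validation is built on (PyJWT; the claims and key below are made up). With verify_signature disabled PyJWT skips all of its own claim checks, which is why the issuer, audience, and expiry comparisons are done by hand above:

import time

import jwt

token = jwt.encode(
    {
        "iss": "http://localhost:7002/realms/spiffworkflow",
        "aud": "spiffworkflow-backend",
        "exp": int(time.time()) + 3600,
    },
    "not-the-real-key",
)
claims = jwt.decode(token, options={"verify_signature": False})
print(claims["iss"], claims["exp"])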


@ -412,59 +412,6 @@ class AuthorizationService:
status_code=403,
)
# def refresh_token(self, token: str) -> str:
# """Refresh_token."""
# # if isinstance(token, str):
# # token = eval(token)
# (
# open_id_server_url,
# open_id_client_id,
# open_id_realm_name,
# open_id_client_secret_key,
# ) = AuthorizationService.get_open_id_args()
# headers = {"Content-Type": "application/x-www-form-urlencoded"}
# request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token"
# data = {
# "grant_type": "refresh_token",
# "client_id": "spiffworkflow-frontend",
# "subject_token": token,
# "refresh_token": token,
# }
# refresh_response = requests.post(request_url, headers=headers, data=data)
# refresh_token = json.loads(refresh_response.text)
# return refresh_token
# def get_bearer_token(self, basic_token: str) -> dict:
# """Get_bearer_token."""
# (
# open_id_server_url,
# open_id_client_id,
# open_id_realm_name,
# open_id_client_secret_key,
# ) = AuthorizationService.get_open_id_args()
#
# backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}"
# backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
# backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
#
# headers = {
# "Content-Type": "application/x-www-form-urlencoded",
# "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}",
# }
# data = {
# "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
# "client_id": open_id_client_id,
# "subject_token": basic_token,
# "audience": open_id_client_id,
# }
# request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token"
#
# backend_response = requests.post(request_url, headers=headers, data=data)
# # json_data = json.loads(backend_response.text)
# # bearer_token = json_data['access_token']
# bearer_token: dict = json.loads(backend_response.text)
# return bearer_token
@staticmethod
def decode_auth_token(auth_token: str) -> dict[str, Union[str, None]]:
"""Decode the auth token.


@ -50,8 +50,6 @@ class DataSetupService:
)
)
current_app.logger.debug(
"DataSetupService.save_all_process_models() end"
)
current_app.logger.debug("DataSetupService.save_all_process_models() end")
db.session.commit()
return failing_process_models

View File

@ -1,14 +1,22 @@
"""Error_handling_service."""
from typing import Any
from typing import List
import json
from typing import Union
from flask import current_app
from flask import g
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.message_triggerable_process_model import (
MessageTriggerableProcessModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.services.email_service import EmailService
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@ -37,6 +45,7 @@ class ErrorHandlingService:
process_model = ProcessModelService.get_process_model(
_processor.process_model_identifier
)
# First, suspend or fault the instance
if process_model.fault_or_suspend_on_exception == "suspend":
self.set_instance_status(
_processor.process_instance_model.id,
@ -49,57 +58,93 @@ class ErrorHandlingService:
ProcessInstanceStatus.error.value,
)
# Second, call the System Notification Process
# Note that this isn't the best way to do this.
# The configs are all in the model.
# Maybe we can move some of this to the notification process, or dmn tables.
if len(process_model.exception_notification_addresses) > 0:
try:
# some notification method (waku?)
self.handle_email_notification(
_processor, _error, process_model.exception_notification_addresses
)
self.handle_system_notification(_error, process_model)
except Exception as e:
# hmm... what to do if a notification method fails. Probably log, at least
print(e)
current_app.logger.error(e)
@staticmethod
def hanle_sentry_notification(_error: ApiError, _recipients: List) -> None:
"""SentryHandler."""
...
@staticmethod
def handle_email_notification(
processor: ProcessInstanceProcessor,
error: Union[ApiError, Exception],
recipients: List,
) -> None:
"""EmailHandler."""
subject = "Unexpected error in app"
if isinstance(error, ApiError):
content = f"{error.message}"
else:
content = str(error)
content_html = content
EmailService.add_email(
subject,
"sender@company.com",
recipients,
content,
content_html,
cc=None,
bcc=None,
reply_to=None,
attachment_files=None,
def handle_system_notification(
error: Union[ApiError, Exception], process_model: ProcessModelInfo
) -> Response:
"""Handle_system_notification."""
recipients = process_model.exception_notification_addresses
message_text = (
f"There was an exception running process {process_model.id}.\nOriginal"
f" Error:\n{error.__repr__()}"
)
message_payload = {"message_text": message_text, "recipients": recipients}
message_identifier = current_app.config[
"SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID"
]
message_model = MessageModel.query.filter_by(
identifier=message_identifier
).first()
message_triggerable_process_model = (
MessageTriggerableProcessModel.query.filter_by(
message_model_id=message_model.id
).first()
)
process_instance = MessageService.process_message_triggerable_process_model(
message_triggerable_process_model,
message_identifier,
message_payload,
g.user,
)
@staticmethod
def handle_waku_notification(_error: ApiError, _recipients: List) -> Any:
"""WakuHandler."""
# class WakuMessage:
# """WakuMessage."""
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)
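The notification path above is driven entirely by configuration: a hedged sketch, assuming a Flask config entry points the handler at a message-triggerable process (the value here is hypothetical; the key name comes from the code above).

# in the Flask config; the notification process must declare a matching message id
SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = "fault_message"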
# @staticmethod
# def handle_sentry_notification(_error: ApiError, _recipients: List) -> None:
# """SentryHandler."""
# ...
#
# payload: str
# contentTopic: str # Optional
# version: int # Optional
# timestamp: int # Optional
# @staticmethod
# def handle_email_notification(
# processor: ProcessInstanceProcessor,
# error: Union[ApiError, Exception],
# recipients: List,
# ) -> None:
# """EmailHandler."""
# subject = "Unexpected error in app"
# if isinstance(error, ApiError):
# content = f"{error.message}"
# else:
# content = str(error)
# content_html = content
#
# EmailService.add_email(
# subject,
# "sender@company.com",
# recipients,
# content,
# content_html,
# cc=None,
# bcc=None,
# reply_to=None,
# attachment_files=None,
# )
#
# @staticmethod
# def handle_waku_notification(_error: ApiError, _recipients: List) -> Any:
# """WakuHandler."""
# # class WakuMessage:
# # """WakuMessage."""
# #
# # payload: str
# # contentTopic: str # Optional
# # version: int # Optional
# # timestamp: int # Optional
class FailingService:

View File

@ -42,7 +42,6 @@ class MessageService:
message_type="receive", status="ready"
).all()
for message_instance_send in message_instances_send:
# print(f"message_instance_send.id: {message_instance_send.id}")
# check again in case another background process picked up the message
# while the previous one was running
if message_instance_send.status != "ready":

View File

@ -34,6 +34,8 @@ from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # typ
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore
from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition # type: ignore
from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore
@ -623,7 +625,25 @@ class ProcessInstanceProcessor:
db.session.add(pim)
db.session.commit()
def get_subprocesses_by_child_task_ids(self) -> dict:
def get_all_task_specs(self) -> dict[str, dict]:
"""This looks both at top level task_specs and subprocess_specs in the serialized data.
It returns a dict of all task specs based on the task name like it is in the serialized form.
NOTE: this may not fully work for tasks that are NOT call activities since their task_name may no be unique
but in our current use case we only care about the call activities here.
"""
serialized_data = json.loads(self.serialize())
spiff_task_json = serialized_data["spec"]["task_specs"] or {}
if "subprocess_specs" in serialized_data:
for _subprocess_task_name, subprocess_details in serialized_data[
"subprocess_specs"
].items():
if "task_specs" in subprocess_details:
spiff_task_json = spiff_task_json | subprocess_details["task_specs"]
return spiff_task_json
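A minimal sketch of the merge step above: the dict union operator (Python 3.9+) folds each subprocess's task_specs into the top-level specs, keyed by task name (the sample specs are made up).

top_level = {"Start": {"typename": "StartEvent"}}
subprocess_specs = {"sub_one": {"task_specs": {"call_me": {"typename": "CallActivity"}}}}
merged = top_level
for details in subprocess_specs.values():
    if "task_specs" in details:
        merged = merged | details["task_specs"]
assert merged["call_me"]["typename"] == "CallActivity"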
def get_subprocesses_by_child_task_ids(self) -> Tuple[dict, dict]:
"""Get all subprocess ids based on the child task ids.
This is useful when trying to link the child task of a call activity back to
@ -639,11 +659,46 @@ class ProcessInstanceProcessor:
call activities like subprocesses in terms of the serialization.
"""
bpmn_json = json.loads(self.serialize())
spiff_task_json = self.get_all_task_specs()
subprocesses_by_child_task_ids = {}
task_typename_by_task_id = {}
if "subprocesses" in bpmn_json:
for subprocess_id, subprocess_details in bpmn_json["subprocesses"].items():
for task_id in subprocess_details["tasks"]:
for task_id, task_details in subprocess_details["tasks"].items():
subprocesses_by_child_task_ids[task_id] = subprocess_id
task_name = task_details["task_spec"]
if task_name in spiff_task_json:
task_typename_by_task_id[task_id] = spiff_task_json[task_name][
"typename"
]
return (subprocesses_by_child_task_ids, task_typename_by_task_id)
def get_highest_level_calling_subprocesses_by_child_task_ids(
self, subprocesses_by_child_task_ids: dict, task_typename_by_task_id: dict
) -> dict:
"""Ensure task ids point to the top level subprocess id.
This is done by repeatedly checking whether a subprocess id is itself a child task, until the subprocess is no longer a task or is a Call Activity.
"""
for task_id, subprocess_id in subprocesses_by_child_task_ids.items():
if subprocess_id in subprocesses_by_child_task_ids:
current_subprocess_id_for_task = subprocesses_by_child_task_ids[task_id]
if current_subprocess_id_for_task in task_typename_by_task_id:
# a call activity is like the top-level subprocess since it is the calling subprocess
# according to spiff and the top-level calling subprocess is really what we care about
if (
task_typename_by_task_id[current_subprocess_id_for_task]
== "CallActivity"
):
continue
subprocesses_by_child_task_ids[task_id] = (
subprocesses_by_child_task_ids[subprocess_id]
)
self.get_highest_level_calling_subprocesses_by_child_task_ids(
subprocesses_by_child_task_ids, task_typename_by_task_id
)
return subprocesses_by_child_task_ids
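A worked example of the resolution above with made-up ids: task t1 lives in subprocess s1, and s1 is itself a child task of subprocess s2, so t1 should resolve to s2 unless s1 is a call activity. This is a single-pass re-implementation for illustration; the real method recurses until the mapping is stable.

subprocesses_by_child_task_ids = {"t1": "s1", "s1": "s2"}
task_typename_by_task_id = {"s1": "SubWorkflowTask"}  # a plain subworkflow, not a CallActivity

def resolve(mapping: dict, typenames: dict) -> dict:
    # climb one level for every task whose subprocess is itself a child task,
    # unless that subprocess is a CallActivity (already the calling subprocess)
    for task_id, subprocess_id in mapping.items():
        if subprocess_id in mapping and typenames.get(subprocess_id) != "CallActivity":
            mapping[task_id] = mapping[subprocess_id]
    return mapping

assert resolve(subprocesses_by_child_task_ids, task_typename_by_task_id) == {"t1": "s2", "s1": "s2"}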
def save(self) -> None:
@ -770,6 +825,15 @@ class ProcessInstanceProcessor:
f"Manually executing Task {spiff_task.task_spec.name} of process"
f" instance {self.process_instance_model.id}"
)
# Executing a subworkflow manually will restart its subprocess and allow stepping through it
if isinstance(spiff_task.task_spec, SubWorkflowTask):
subprocess = self.bpmn_process_instance.get_subprocess(spiff_task)
# We have to get to the actual start event
for task in self.bpmn_process_instance.get_tasks(workflow=subprocess):
task.complete()
if isinstance(task.task_spec, StartEvent):
break
else:
spiff_task.complete()
else:
spiff_logger = logging.getLogger("spiff")
@ -779,7 +843,20 @@ class ProcessInstanceProcessor:
spiff_task._set_state(TaskState.COMPLETED)
for child in spiff_task.children:
child.task_spec._update(child)
self.bpmn_process_instance.last_task = spiff_task
spiff_task.workflow.last_task = spiff_task
if isinstance(spiff_task.task_spec, EndEvent):
for task in self.bpmn_process_instance.get_tasks(
TaskState.DEFINITE_MASK, workflow=spiff_task.workflow
):
task.complete()
# A subworkflow task will become ready when its workflow is complete. Engine steps would normally
# then complete it, but we have to do it ourselves here.
for task in self.bpmn_process_instance.get_tasks(TaskState.READY):
if isinstance(task.task_spec, SubWorkflowTask):
task.complete()
self.increment_spiff_step()
self.add_step()
self.save()
@ -944,10 +1021,10 @@ class ProcessInstanceProcessor:
for file in files:
data = SpecFileService.get_data(process_model_info, file.name)
if file.type == FileType.bpmn.value:
bpmn: etree.Element = etree.fromstring(data)
bpmn: etree.Element = SpecFileService.get_etree_from_xml_bytes(data)
parser.add_bpmn_xml(bpmn, filename=file.name)
elif file.type == FileType.dmn.value:
dmn: etree.Element = etree.fromstring(data)
dmn: etree.Element = SpecFileService.get_etree_from_xml_bytes(data)
parser.add_dmn_xml(dmn, filename=file.name)
if (
process_model_info.primary_process_id is None
@ -991,9 +1068,13 @@ class ProcessInstanceProcessor:
if bpmn_process_instance.is_completed():
return ProcessInstanceStatus.complete
user_tasks = bpmn_process_instance.get_ready_user_tasks()
waiting_tasks = bpmn_process_instance.get_tasks(TaskState.WAITING)
if len(waiting_tasks) > 0:
return ProcessInstanceStatus.waiting
# If the process instance has status "waiting" it will get picked up
# by background processing. When that happens it can potentially overwrite
# human tasks, which is bad because we cache them with the previous ids.
# waiting_tasks = bpmn_process_instance.get_tasks(TaskState.WAITING)
# if len(waiting_tasks) > 0:
# return ProcessInstanceStatus.waiting
if len(user_tasks) > 0:
return ProcessInstanceStatus.user_input_required
else:
@ -1180,10 +1261,13 @@ class ProcessInstanceProcessor:
step_details = []
try:
self.bpmn_process_instance.refresh_waiting_tasks(
will_refresh_task=lambda t: self.increment_spiff_step(),
did_refresh_task=lambda t: step_details.append(
self.spiff_step_details_mapping()
),
#
# commenting out to see if this helps with the growing spiff steps/db issue
#
# will_refresh_task=lambda t: self.increment_spiff_step(),
# did_refresh_task=lambda t: step_details.append(
# self.spiff_step_details_mapping()
# ),
)
self.bpmn_process_instance.do_engine_steps(
@ -1227,8 +1311,34 @@ class ProcessInstanceProcessor:
except WorkflowTaskExecException as we:
raise ApiError.from_workflow_exception("task_error", str(we), we) from we
def user_defined_task_data(self, task_data: dict) -> dict:
"""UserDefinedTaskData."""
return {k: v for k, v in task_data.items() if k != "current_user"}
def check_task_data_size(self) -> None:
"""CheckTaskDataSize."""
tasks_to_check = self.bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK)
task_data = [self.user_defined_task_data(task.data) for task in tasks_to_check]
task_data_to_check = list(filter(len, task_data))
try:
task_data_len = len(json.dumps(task_data_to_check))
except Exception:
task_data_len = 0
task_data_limit = 1024**2
if task_data_len > task_data_limit:
raise (
ApiError(
error_code="task_data_size_exceeded",
message=f"Maximum task data size of {task_data_limit} exceeded.",
)
)
def serialize(self) -> str:
"""Serialize."""
self.check_task_data_size()
return self._serializer.serialize_json(self.bpmn_process_instance) # type: ignore
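A minimal sketch of the cap above: only user-defined task data (everything except current_user) is serialized, empty dicts are filtered out, and the JSON length is compared against the 1 MiB limit.

import json

task_data = [{"current_user": {"id": 1}, "answer": 42}, {}]
user_defined = [{k: v for k, v in d.items() if k != "current_user"} for d in task_data]
non_empty = list(filter(len, user_defined))
task_data_len = len(json.dumps(non_empty))
assert task_data_len <= 1024**2  # otherwise task_data_size_exceeded is raised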
def next_user_tasks(self) -> list[SpiffTask]:

View File

@ -3,10 +3,11 @@ import re
from dataclasses import dataclass
from typing import Any
from typing import Optional
from typing import Type
import sqlalchemy
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import and_
from sqlalchemy import func
from sqlalchemy import or_
@ -28,6 +29,10 @@ from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignme
from spiffworkflow_backend.services.process_model_service import ProcessModelService
class ProcessInstanceReportNotFoundError(Exception):
"""ProcessInstanceReportNotFoundError."""
@dataclass
class ProcessInstanceReportFilter:
"""ProcessInstanceReportFilter."""
@ -44,6 +49,7 @@ class ProcessInstanceReportFilter:
with_tasks_completed_by_me: Optional[bool] = None
with_tasks_assigned_to_my_group: Optional[bool] = None
with_relation_to_me: Optional[bool] = None
process_initiator_username: Optional[str] = None
def to_dict(self) -> dict[str, str]:
"""To_dict."""
@ -77,6 +83,8 @@ class ProcessInstanceReportFilter:
).lower()
if self.with_relation_to_me is not None:
d["with_relation_to_me"] = str(self.with_relation_to_me).lower()
if self.process_initiator_username is not None:
d["process_initiator_username"] = str(self.process_initiator_username)
return d
@ -85,7 +93,7 @@ class ProcessInstanceReportService:
"""ProcessInstanceReportService."""
@classmethod
def system_metadata_map(cls, metadata_key: str) -> dict[str, Any]:
def system_metadata_map(cls, metadata_key: str) -> Optional[dict[str, Any]]:
"""System_metadata_map."""
# TODO replace with system reports that are loaded on launch (or similar)
temp_system_metadata_map = {
@ -106,16 +114,16 @@ class ProcessInstanceReportService:
{"Header": "status", "accessor": "status"},
],
"filter_by": [
{"field_name": "initiated_by_me", "field_value": True},
{"field_name": "has_terminal_status", "field_value": True},
{"field_name": "initiated_by_me", "field_value": "true"},
{"field_name": "has_terminal_status", "field_value": "true"},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_completed_instances_with_tasks_completed_by_me": {
"columns": cls.builtin_column_options(),
"filter_by": [
{"field_name": "with_tasks_completed_by_me", "field_value": True},
{"field_name": "has_terminal_status", "field_value": True},
{"field_name": "with_tasks_completed_by_me", "field_value": "true"},
{"field_name": "has_terminal_status", "field_value": "true"},
],
"order_by": ["-start_in_seconds", "-id"],
},
@ -124,13 +132,16 @@ class ProcessInstanceReportService:
"filter_by": [
{
"field_name": "with_tasks_assigned_to_my_group",
"field_value": True,
"field_value": "true",
},
{"field_name": "has_terminal_status", "field_value": True},
{"field_name": "has_terminal_status", "field_value": "true"},
],
"order_by": ["-start_in_seconds", "-id"],
},
}
if metadata_key not in temp_system_metadata_map:
return None
return temp_system_metadata_map[metadata_key]
@classmethod
@ -157,10 +168,17 @@ class ProcessInstanceReportService:
if process_instance_report is not None:
return process_instance_report # type: ignore
report_metadata = cls.system_metadata_map(report_identifier)
if report_metadata is None:
raise ProcessInstanceReportNotFoundError(
f"Could not find a report with identifier '{report_identifier}' for"
f" user '{user.username}'"
)
process_instance_report = ProcessInstanceReportModel(
identifier=report_identifier,
created_by_id=user.id,
report_metadata=cls.system_metadata_map(report_identifier),
report_metadata=report_metadata,
)
return process_instance_report # type: ignore
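A minimal sketch of the new not-found behavior, assuming the module path below (it is the file being diffed here): an unknown identifier now yields None from system_metadata_map, and the lookup above raises ProcessInstanceReportNotFoundError instead of persisting a report with None metadata.

from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportService,
)

report_metadata = ProcessInstanceReportService.system_metadata_map("no_such_report")
assert report_metadata is None  # triggers ProcessInstanceReportNotFoundError upstream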
@ -210,20 +228,22 @@ class ProcessInstanceReportService:
with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me")
with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group")
with_relation_to_me = bool_value("with_relation_to_me")
process_initiator_username = filters.get("process_initiator_username")
report_filter = ProcessInstanceReportFilter(
process_model_identifier,
user_group_identifier,
start_from,
start_to,
end_from,
end_to,
process_status,
initiated_by_me,
has_terminal_status,
with_tasks_completed_by_me,
with_tasks_assigned_to_my_group,
with_relation_to_me,
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
initiated_by_me=initiated_by_me,
has_terminal_status=has_terminal_status,
with_tasks_completed_by_me=with_tasks_completed_by_me,
with_tasks_assigned_to_my_group=with_tasks_assigned_to_my_group,
with_relation_to_me=with_relation_to_me,
process_initiator_username=process_initiator_username,
)
return report_filter
@ -244,6 +264,7 @@ class ProcessInstanceReportService:
with_tasks_completed_by_me: Optional[bool] = None,
with_tasks_assigned_to_my_group: Optional[bool] = None,
with_relation_to_me: Optional[bool] = None,
process_initiator_username: Optional[str] = None,
) -> ProcessInstanceReportFilter:
"""Filter_from_metadata_with_overrides."""
report_filter = cls.filter_from_metadata(process_instance_report)
@ -268,6 +289,8 @@ class ProcessInstanceReportService:
report_filter.has_terminal_status = has_terminal_status
if with_tasks_completed_by_me is not None:
report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me
if process_initiator_username is not None:
report_filter.process_initiator_username = process_initiator_username
if with_tasks_assigned_to_my_group is not None:
report_filter.with_tasks_assigned_to_my_group = (
with_tasks_assigned_to_my_group
@ -297,7 +320,9 @@ class ProcessInstanceReportService:
return results
@classmethod
def get_column_names_for_model(cls, model: db.Model) -> list[str]: # type: ignore
def get_column_names_for_model(
cls, model: Type[SpiffworkflowBaseDBModel]
) -> list[str]:
"""Get_column_names_for_model."""
return [i.name for i in model.__table__.columns]
@ -313,7 +338,11 @@ class ProcessInstanceReportService:
},
{"Header": "Start", "accessor": "start_in_seconds", "filterable": False},
{"Header": "End", "accessor": "end_in_seconds", "filterable": False},
{"Header": "Username", "accessor": "username", "filterable": False},
{
"Header": "Started By",
"accessor": "process_initiator_username",
"filterable": False,
},
{"Header": "Status", "accessor": "status", "filterable": False},
]
@ -386,6 +415,17 @@ class ProcessInstanceReportService:
ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore
)
if report_filter.process_initiator_username is not None:
user = UserModel.query.filter_by(
username=report_filter.process_initiator_username
).first()
process_initiator_id = -1
if user:
process_initiator_id = user.id
process_instance_query = process_instance_query.filter_by(
process_initiator_id=process_initiator_id
)
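A minimal sketch of the fallback above, assuming a Flask app context and db session: an unknown username maps to the impossible initiator id -1, so the query simply matches no rows rather than raising.

user = UserModel.query.filter_by(username="ghost_user").first()  # hypothetical username
process_initiator_id = user.id if user else -1  # -1 matches no real initiator
matching = ProcessInstanceModel.query.filter_by(
    process_initiator_id=process_initiator_id
).all()
# with no such user, matching is simply []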
if (
not report_filter.with_tasks_completed_by_me
and not report_filter.with_tasks_assigned_to_my_group

View File

@ -27,6 +27,10 @@ from spiffworkflow_backend.services.user_service import UserService
T = TypeVar("T")
class ProcessModelWithInstancesNotDeletableError(Exception):
"""ProcessModelWithInstancesNotDeletableError."""
class ProcessModelService(FileSystemService):
"""ProcessModelService."""
@ -40,7 +44,12 @@ class ProcessModelService(FileSystemService):
PROCESS_MODEL_SCHEMA = ProcessModelInfoSchema()
@classmethod
def is_group(cls, path: str) -> bool:
def path_to_id(cls, path: str) -> str:
"""Replace the os path separator for the standard id separator."""
return path.replace(os.sep, "/")
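A quick usage sketch: os.path.join uses the platform separator, so on Windows a joined path contains backslashes, and path_to_id normalizes them back to the forward-slash ids used everywhere else.

import os

relative_path = os.path.join("test_group", "test_model")
process_id = relative_path.replace(os.sep, "/")
assert process_id == "test_group/test_model"  # holds on both Windows and POSIX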
@classmethod
def is_process_group(cls, path: str) -> bool:
"""Is_group."""
group_json_path = os.path.join(path, cls.PROCESS_GROUP_JSON_FILE)
if os.path.exists(group_json_path):
@ -48,8 +57,8 @@ class ProcessModelService(FileSystemService):
return False
@classmethod
def is_group_identifier(cls, process_group_identifier: str) -> bool:
"""Is_group_identifier."""
def is_process_group_identifier(cls, process_group_identifier: str) -> bool:
"""Is_process_group_identifier."""
if os.path.exists(FileSystemService.root_path()):
process_group_path = os.path.abspath(
os.path.join(
@ -59,21 +68,21 @@ class ProcessModelService(FileSystemService):
),
)
)
return cls.is_group(process_group_path)
return cls.is_process_group(process_group_path)
return False
@classmethod
def is_model(cls, path: str) -> bool:
"""Is_model."""
def is_process_model(cls, path: str) -> bool:
"""Is_process_model."""
model_json_path = os.path.join(path, cls.PROCESS_MODEL_JSON_FILE)
if os.path.exists(model_json_path):
return True
return False
@classmethod
def is_model_identifier(cls, process_model_identifier: str) -> bool:
"""Is_model_identifier."""
def is_process_model_identifier(cls, process_model_identifier: str) -> bool:
"""Is_process_model_identifier."""
if os.path.exists(FileSystemService.root_path()):
process_model_path = os.path.abspath(
os.path.join(
@ -83,7 +92,7 @@ class ProcessModelService(FileSystemService):
),
)
)
return cls.is_model(process_model_path)
return cls.is_process_model(process_model_path)
return False
@ -125,7 +134,9 @@ class ProcessModelService(FileSystemService):
def save_process_model(cls, process_model: ProcessModelInfo) -> None:
"""Save_process_model."""
process_model_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), process_model.id)
os.path.join(
FileSystemService.root_path(), process_model.id_for_file_path()
)
)
os.makedirs(process_model_path, exist_ok=True)
json_path = os.path.abspath(
@ -146,12 +157,9 @@ class ProcessModelService(FileSystemService):
ProcessInstanceModel.process_model_identifier == process_model_id
).all()
if len(instances) > 0:
raise ApiError(
error_code="existing_instances",
message=(
raise ProcessModelWithInstancesNotDeletableError(
f"We cannot delete the model `{process_model_id}`, there are"
" existing instances that depend on it."
),
)
process_model = self.get_process_model(process_model_id)
path = self.workflow_path(process_model)
@ -192,7 +200,7 @@ class ProcessModelService(FileSystemService):
model_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), process_model_id)
)
if cls.is_model(model_path):
if cls.is_process_model(model_path):
return cls.get_process_model_from_relative_path(process_model_id)
raise ProcessEntityNotFoundError("process_model_not_found")
@ -228,7 +236,12 @@ class ProcessModelService(FileSystemService):
user = UserService.current_user()
new_process_model_list = []
for process_model in process_models:
uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}"
modified_process_model_id = (
ProcessModelInfo.modify_process_identifier_for_path_param(
process_model.id
)
)
uri = f"/v1.0/process-instances/{modified_process_model_id}"
has_permission = AuthorizationService.user_has_permission(
user=user, permission="create", target_uri=uri
)
@ -291,7 +304,7 @@ class ProcessModelService(FileSystemService):
FileSystemService.id_string_to_relative_path(process_group_id),
)
)
if cls.is_group(process_group_path):
if cls.is_process_group(process_group_path):
return cls.find_or_create_process_group(
process_group_path,
find_direct_nested_items=find_direct_nested_items,
@ -339,7 +352,7 @@ class ProcessModelService(FileSystemService):
for _root, dirs, _files in os.walk(group_path):
for dir in dirs:
model_dir = os.path.join(group_path, dir)
if ProcessModelService.is_model(model_dir):
if ProcessModelService.is_process_model(model_dir):
process_model = self.get_process_model(model_dir)
all_nested_models.append(process_model)
return all_nested_models
@ -357,13 +370,10 @@ class ProcessModelService(FileSystemService):
if len(instances) > 0:
problem_models.append(process_model)
if len(problem_models) > 0:
raise ApiError(
error_code="existing_instances",
message=(
raise ProcessModelWithInstancesNotDeletableError(
f"We cannot delete the group `{process_group_id}`, there are"
" models with existing instances inside the group."
f" {problem_models}"
),
)
shutil.rmtree(path)
self.cleanup_process_group_display_order()
@ -394,7 +404,7 @@ class ProcessModelService(FileSystemService):
process_groups = []
for item in directory_items:
# if item.is_dir() and not item.name[0] == ".":
if item.is_dir() and cls.is_group(item): # type: ignore
if item.is_dir() and cls.is_process_group(item): # type: ignore
scanned_process_group = cls.find_or_create_process_group(item.path)
process_groups.append(scanned_process_group)
return process_groups
@ -410,7 +420,7 @@ class ProcessModelService(FileSystemService):
data = json.load(cat_json)
# we don't store `id` in the json files, so we add it back in here
relative_path = os.path.relpath(dir_path, FileSystemService.root_path())
data["id"] = relative_path
data["id"] = cls.path_to_id(relative_path)
process_group = ProcessGroup(**data)
if process_group is None:
raise ApiError(
@ -421,7 +431,9 @@ class ProcessModelService(FileSystemService):
),
)
else:
process_group_id = dir_path.replace(FileSystemService.root_path(), "")
process_group_id = cls.path_to_id(
dir_path.replace(FileSystemService.root_path(), "")
)
process_group = ProcessGroup(
id="",
display_name=process_group_id,
@ -439,12 +451,12 @@ class ProcessModelService(FileSystemService):
for nested_item in nested_items:
if nested_item.is_dir():
# TODO: check whether this is a group or model
if cls.is_group(nested_item.path):
if cls.is_process_group(nested_item.path):
# This is a nested group
process_group.process_groups.append(
cls.find_or_create_process_group(nested_item.path)
)
elif ProcessModelService.is_model(nested_item.path):
elif ProcessModelService.is_process_model(nested_item.path):
process_group.process_models.append(
cls.__scan_process_model(
nested_item.path,
@ -474,11 +486,7 @@ class ProcessModelService(FileSystemService):
data.pop("process_group_id")
# we don't save `id` in the json file, so we add it back in here.
relative_path = os.path.relpath(path, FileSystemService.root_path())
# even on windows, use forward slashes for ids
relative_path = relative_path.replace("\\", "/")
data["id"] = relative_path
data["id"] = cls.path_to_id(relative_path)
process_model_info = ProcessModelInfo(**data)
if process_model_info is None:
raise ApiError(

View File

@ -55,9 +55,6 @@ class ServiceTaskDelegate:
f"{connector_proxy_url()}/v1/do/{name}", json=params
)
if proxied_response.status_code != 200:
print("got error from connector proxy")
parsed_response = json.loads(proxied_response.text)
if "refreshed_token_set" not in parsed_response:
@ -86,7 +83,7 @@ class ServiceTaskService:
parsed_response = json.loads(response.text)
return parsed_response
except Exception as e:
print(e)
current_app.logger.error(e)
return []
@staticmethod

View File

@ -6,7 +6,8 @@ from typing import List
from typing import Optional
from flask_bpmn.models.db import db
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore
from lxml import etree # type: ignore
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator # type: ignore
from spiffworkflow_backend.models.file import File
from spiffworkflow_backend.models.file import FileType
@ -29,6 +30,10 @@ class ProcessModelFileNotFoundError(Exception):
"""ProcessModelFileNotFoundError."""
class ProcessModelFileInvalidError(Exception):
"""ProcessModelFileInvalidError."""
class SpecFileService(FileSystemService):
"""SpecFileService."""
@ -44,7 +49,6 @@ class SpecFileService(FileSystemService):
extension_filter: str = "",
) -> List[File]:
"""Return all files associated with a workflow specification."""
# path = SpecFileService.workflow_path(process_model_info)
path = os.path.join(
FileSystemService.root_path(), process_model_info.id_for_file_path()
)
@ -76,9 +80,28 @@ class SpecFileService(FileSystemService):
)
return references
@staticmethod
@classmethod
def get_references_for_file(
file: File, process_model_info: ProcessModelInfo
cls, file: File, process_model_info: ProcessModelInfo
) -> list[SpecReference]:
"""Get_references_for_file."""
full_file_path = SpecFileService.full_file_path(process_model_info, file.name)
file_contents: bytes = b""
with open(full_file_path) as f:
file_contents = f.read().encode()
return cls.get_references_for_file_contents(
process_model_info, file.name, file_contents
)
@classmethod
def get_etree_from_xml_bytes(cls, binary_data: bytes) -> etree.Element:
"""Get_etree_from_xml_bytes."""
etree_xml_parser = etree.XMLParser(resolve_entities=False)
return etree.fromstring(binary_data, parser=etree_xml_parser)
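A minimal sketch of why resolve_entities=False matters: with entity resolution disabled, an XXE payload like the test fixture further below leaves the external entity unexpanded instead of reading the referenced file into the tree.

from lxml import etree

xml = b'<!DOCTYPE r [<!ENTITY ent SYSTEM "file:///etc/passwd">]><r>&ent;</r>'
parser = etree.XMLParser(resolve_entities=False)
root = etree.fromstring(xml, parser=parser)
assert root.text is None  # the entity reference stays unresolved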
@classmethod
def get_references_for_file_contents(
cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
) -> list[SpecReference]:
"""Uses spiffworkflow to parse BPMN and DMN files to determine how they can be externally referenced.
@ -89,8 +112,8 @@ class SpecFileService(FileSystemService):
type = {str} 'process' / 'decision'
"""
references: list[SpecReference] = []
full_file_path = SpecFileService.full_file_path(process_model_info, file.name)
file_path = os.path.join(process_model_info.id_for_file_path(), file.name)
file_path = os.path.join(process_model_info.id_for_file_path(), file_name)
file_type = FileSystemService.file_type(file_name)
parser = MyCustomParser()
parser_type = None
sub_parser = None
@ -100,14 +123,14 @@ class SpecFileService(FileSystemService):
messages = {}
correlations = {}
start_messages = []
if file.type == FileType.bpmn.value:
parser.add_bpmn_file(full_file_path)
if file_type.value == FileType.bpmn.value:
parser.add_bpmn_xml(cls.get_etree_from_xml_bytes(binary_data))
parser_type = "process"
sub_parsers = list(parser.process_parsers.values())
messages = parser.messages
correlations = parser.correlations
elif file.type == FileType.dmn.value:
parser.add_dmn_file(full_file_path)
elif file_type.value == FileType.dmn.value:
parser.add_dmn_xml(cls.get_etree_from_xml_bytes(binary_data))
sub_parsers = list(parser.dmn_parsers.values())
parser_type = "decision"
else:
@ -127,7 +150,7 @@ class SpecFileService(FileSystemService):
display_name=sub_parser.get_name(),
process_model_id=process_model_info.id,
type=parser_type,
file_name=file.name,
file_name=file_name,
relative_path=file_path,
has_lanes=has_lanes,
is_executable=is_executable,
@ -147,23 +170,38 @@ class SpecFileService(FileSystemService):
# Same as update
return SpecFileService.update_file(process_model_info, file_name, binary_data)
@staticmethod
@classmethod
def validate_bpmn_xml(cls, file_name: str, binary_data: bytes) -> None:
"""Validate_bpmn_xml."""
file_type = FileSystemService.file_type(file_name)
if file_type.value == FileType.bpmn.value:
validator = BpmnValidator()
parser = MyCustomParser(validator=validator)
try:
parser.add_bpmn_xml(
cls.get_etree_from_xml_bytes(binary_data), filename=file_name
)
except etree.XMLSyntaxError as exception:
raise ProcessModelFileInvalidError(
f"Received error trying to parse bpmn xml: {str(exception)}"
) from exception
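A minimal usage sketch, assuming the service's usual module path: malformed XML now surfaces as ProcessModelFileInvalidError before anything is written to disk.

from spiffworkflow_backend.services.spec_file_service import ProcessModelFileInvalidError
from spiffworkflow_backend.services.spec_file_service import SpecFileService

try:
    SpecFileService.validate_bpmn_xml("broken.bpmn", b"<bpmn:definitions")
except ProcessModelFileInvalidError as exception:
    print(exception)  # "Received error trying to parse bpmn xml: ..."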
@classmethod
def update_file(
process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
) -> File:
"""Update_file."""
SpecFileService.assert_valid_file_name(file_name)
full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
SpecFileService.write_file_data_to_system(full_file_path, binary_data)
file = SpecFileService.to_file_object(file_name, full_file_path)
cls.validate_bpmn_xml(file_name, binary_data)
references = SpecFileService.get_references_for_file(file, process_model_info)
references = cls.get_references_for_file_contents(
process_model_info, file_name, binary_data
)
primary_process_ref = next(
(ref for ref in references if ref.is_primary and ref.is_executable), None
)
SpecFileService.clear_caches_for_file(file_name, process_model_info)
for ref in references:
# If no valid primary process is defined, default to the first process in the
# updated file.
@ -184,7 +222,11 @@ class SpecFileService(FileSystemService):
update_hash,
)
SpecFileService.update_caches(ref)
return file
# make sure we save the file as the last thing we do to ensure validations have run
full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
SpecFileService.write_file_data_to_system(full_file_path, binary_data)
return SpecFileService.to_file_object(file_name, full_file_path)
@staticmethod
def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
@ -282,7 +324,7 @@ class SpecFileService(FileSystemService):
# if the old relative bpmn file no longer exists, then assume things were moved around
# on the file system. Otherwise, assume it is a duplicate process id and error.
if os.path.isfile(full_bpmn_file_path):
raise ValidationException(
raise ProcessModelFileInvalidError(
f"Process id ({ref.identifier}) has already been used for "
f"{process_id_lookup.relative_path}. It cannot be reused."
)
@ -314,7 +356,7 @@ class SpecFileService(FileSystemService):
identifier=message_model_identifier
).first()
if message_model is None:
raise ValidationException(
raise ProcessModelFileInvalidError(
"Could not find message model with identifier"
f" '{message_model_identifier}'Required by a Start Event in :"
f" {ref.file_name}"
@ -336,7 +378,7 @@ class SpecFileService(FileSystemService):
message_triggerable_process_model.process_model_identifier
!= ref.process_model_id
):
raise ValidationException(
raise ProcessModelFileInvalidError(
"Message model is already used to start process model"
f" {ref.process_model_id}"
)
@ -355,7 +397,7 @@ class SpecFileService(FileSystemService):
identifier=message_model_identifier
).first()
if message_model is None:
raise ValidationException(
raise ProcessModelFileInvalidError(
"Could not find message model with identifier"
f" '{message_model_identifier}'specified by correlation"
f" property: {cpre}"

View File

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="ManualTaskToCallFromCallActivityToTestWaitingLogs" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1nxz6rd</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1nxz6rd" sourceRef="StartEvent_1" targetRef="the_manual_task" />
<bpmn:endEvent id="Event_19yodox">
<bpmn:incoming>Flow_1jtitb1</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1jtitb1" sourceRef="the_manual_task" targetRef="Event_19yodox" />
<bpmn:manualTask id="the_manual_task" name="The Manual Task">
<bpmn:extensionElements>
<spiffworkflow:instructionsForEndUser>NOOOOOOOOOOOOOOOOOO!!!!!!!!!!</spiffworkflow:instructionsForEndUser>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1nxz6rd</bpmn:incoming>
<bpmn:outgoing>Flow_1jtitb1</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ManualTaskToCallFromCallActivityToTestWaitingLogs">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_19yodox_di" bpmnElement="Event_19yodox">
<dc:Bounds x="432" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0tl8vo6_di" bpmnElement="the_manual_task">
<dc:Bounds x="270" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1nxz6rd_di" bpmnElement="Flow_1nxz6rd">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1jtitb1_di" bpmnElement="Flow_1jtitb1">
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_zywnms5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0fdzi5f</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0fdzi5f" sourceRef="StartEvent_1" targetRef="call_activity_to_human_task" />
<bpmn:endEvent id="Event_1dzrts2">
<bpmn:incoming>Flow_0ii0wgu</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ii0wgu" sourceRef="call_activity_to_human_task" targetRef="Event_1dzrts2" />
<bpmn:callActivity id="call_activity_to_human_task" name="Call Activity To Human Task" calledElement="ManualTaskToCallFromCallActivityToTestWaitingLogs">
<bpmn:incoming>Flow_0fdzi5f</bpmn:incoming>
<bpmn:outgoing>Flow_0ii0wgu</bpmn:outgoing>
</bpmn:callActivity>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_zywnms5">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1dzrts2_di" bpmnElement="Event_1dzrts2">
<dc:Bounds x="432" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0qb9821_di" bpmnElement="call_activity_to_human_task">
<dc:Bounds x="270" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0fdzi5f_di" bpmnElement="Flow_0fdzi5f">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ii0wgu_di" bpmnElement="Flow_0ii0wgu">
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -2,10 +2,10 @@
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:correlationProperty id="message_correlation_property" name="Message Correlation Property">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
<bpmn:formalExpression>to</bpmn:formalExpression>
<bpmn:messagePath>to</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
<bpmn:formalExpression>from.name</bpmn:formalExpression>
<bpmn:messagePath>from.name</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:message id="message_send" name="Message Send">
@ -20,7 +20,7 @@
</bpmn:message>
<bpmn:correlationProperty id="correlation_property_one" name="Correlation Property One">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
<bpmn:formalExpression>new</bpmn:formalExpression>
<bpmn:messagePath>new</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:process id="test_dot_notation" name="Test Dot Notation" isExecutable="true">

View File

@ -12,18 +12,18 @@
</bpmn:collaboration>
<bpmn:correlationProperty id="message_correlation_property_topica" name="Message Correlation Property TopicA">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
<bpmn:formalExpression>topica</bpmn:formalExpression>
<bpmn:messagePath>topica</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
<bpmn:formalExpression>the_payload.topica</bpmn:formalExpression>
<bpmn:messagePath>the_payload.topica</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:correlationProperty id="message_correlation_property_topicb" name="Message Correlation Property TopicB">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
<bpmn:formalExpression>topicb</bpmn:formalExpression>
<bpmn:messagePath>topicb</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
<bpmn:formalExpression>the_payload.topicb</bpmn:formalExpression>
<bpmn:messagePath>the_payload.topicb</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:message id="message_send" name="Message Send">

View File

@ -12,18 +12,18 @@
</bpmn:collaboration>
<bpmn:correlationProperty id="message_correlation_property_topica" name="Message Correlation Property TopicA">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
<bpmn:formalExpression>topica</bpmn:formalExpression>
<bpmn:messagePath>topica</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
<bpmn:formalExpression>the_payload.topica</bpmn:formalExpression>
<bpmn:messagePath>the_payload.topica</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:correlationProperty id="message_correlation_property_topicb" name="Message Correlation Property TopicB">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
<bpmn:formalExpression>topicb</bpmn:formalExpression>
<bpmn:messagePath>topicb</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
<bpmn:formalExpression>the_payload.topicb</bpmn:formalExpression>
<bpmn:messagePath>the_payload.topicb</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:process id="message_send_process" name="Message Send Process" isExecutable="true">

View File

@ -12,18 +12,18 @@
</bpmn:collaboration>
<bpmn:correlationProperty id="mcp_topica_one" name="MCP TopicA One">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_one">
<bpmn:formalExpression>topica_one</bpmn:formalExpression>
<bpmn:messagePath>topica_one</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_one">
<bpmn:formalExpression>topica_one</bpmn:formalExpression>
<bpmn:messagePath>topica_one</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:correlationProperty id="mcp_topicb_one" name="MCP TopicB_one">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_one">
<bpmn:formalExpression>topicb_one</bpmn:formalExpression>
<bpmn:messagePath>topicb_one</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_one">
<bpmn:formalExpression>topicb_one</bpmn:formalExpression>
<bpmn:messagePath>topicb_one</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:message id="message_send_one" name="Message Send One">

View File

@ -12,18 +12,18 @@
</bpmn:collaboration>
<bpmn:correlationProperty id="mcp_topica_two" name="MCP TopicA Two">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_two">
<bpmn:formalExpression>topica_two</bpmn:formalExpression>
<bpmn:messagePath>topica_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_two">
<bpmn:formalExpression>topica_two</bpmn:formalExpression>
<bpmn:messagePath>topica_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:correlationProperty id="mcp_topicb_two" name="MCP TopicB_two">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_two">
<bpmn:formalExpression>topicb_two</bpmn:formalExpression>
<bpmn:messagePath>topicb_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_two">
<bpmn:formalExpression>topicb_two</bpmn:formalExpression>
<bpmn:messagePath>topicb_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:message id="message_send_two" name="Message Send Two">

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:collaboration id="Collaboration_0oye1os" messages="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]">
<bpmn:collaboration id="Collaboration_0oye1os">
<bpmn:participant id="message_initiator" name="Message Initiator" processRef="message_send_process" />
<bpmn:participant id="message-receiver-one" name="Message Receiver One" />
<bpmn:participant id="message-receiver-two" name="Message Receiver Two" />
@ -19,18 +19,18 @@
</bpmn:collaboration>
<bpmn:correlationProperty id="mcp_topica_one" name="MCP TopicA One">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_one">
<bpmn:formalExpression>topica_one</bpmn:formalExpression>
<bpmn:messagePath>topica_one</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_one">
<bpmn:formalExpression>payload_var_one.topica_one</bpmn:formalExpression>
<bpmn:messagePath>payload_var_one.topica_one</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:correlationProperty id="mcp_topicb_one" name="MCP TopicB_one">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_one">
<bpmn:formalExpression>topicb_one</bpmn:formalExpression>
<bpmn:messagePath>topicb_one</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_one">
<bpmn:formalExpression>payload_var_one.topicb</bpmn:formalExpression>
<bpmn:messagePath>payload_var_one.topicb</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:process id="message_send_process" name="Message Send Process" isExecutable="true">
@ -117,18 +117,18 @@ del time</bpmn:script>
</bpmn:message>
<bpmn:correlationProperty id="mcp_topica_two" name="MCP Topica Two">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_two">
<bpmn:formalExpression>topica_two</bpmn:formalExpression>
<bpmn:messagePath>topica_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_two">
<bpmn:formalExpression>topica_two</bpmn:formalExpression>
<bpmn:messagePath>topica_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmn:correlationProperty id="mcp_topicb_two" name="MCP Topicb Two">
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send_two">
<bpmn:formalExpression>topicb_two</bpmn:formalExpression>
<bpmn:messagePath>topicb_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response_two">
<bpmn:formalExpression>topicb_two</bpmn:formalExpression>
<bpmn:messagePath>topicb_two</bpmn:messagePath>
</bpmn:correlationPropertyRetrievalExpression>
</bpmn:correlationProperty>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">

View File

@ -0,0 +1 @@
THIS_STRING_SHOULD_NOT_EXIST_ITS_SECRET

View File

@ -0,0 +1,6 @@
<!--?xml version="1.0" ?-->
<!DOCTYPE replace [<!ENTITY ent SYSTEM "file://{{FULL_PATH_TO_FILE}}"> ]>
<userInfo>
<firstName>John</firstName>
<lastName>&ent;</lastName>
</userInfo>

View File

@ -139,7 +139,7 @@ class BaseTest:
process_group_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), process_group_id)
)
if ProcessModelService.is_group(process_group_path):
if ProcessModelService.is_process_group(process_group_path):
if exception_notification_addresses is None:
exception_notification_addresses = []
@ -173,11 +173,11 @@ class BaseTest:
" model"
)
def get_test_data_file_contents(
def get_test_data_file_full_path(
self, file_name: str, process_model_test_data_dir: str
) -> bytes:
) -> str:
"""Get_test_data_file_contents."""
file_full_path = os.path.join(
return os.path.join(
current_app.instance_path,
"..",
"..",
@ -186,6 +186,14 @@ class BaseTest:
process_model_test_data_dir,
file_name,
)
def get_test_data_file_contents(
self, file_name: str, process_model_test_data_dir: str
) -> bytes:
"""Get_test_data_file_contents."""
file_full_path = self.get_test_data_file_full_path(
file_name, process_model_test_data_dir
)
with open(file_full_path, "rb") as file:
return file.read()
@ -251,9 +259,9 @@ class BaseTest:
There must be an existing process model to instantiate.
"""
if not ProcessModelService.is_model_identifier(test_process_model_id):
if not ProcessModelService.is_process_model_identifier(test_process_model_id):
dirname = os.path.dirname(test_process_model_id)
if not ProcessModelService.is_group_identifier(dirname):
if not ProcessModelService.is_process_group_identifier(dirname):
process_group = ProcessGroup(id=dirname, display_name=dirname)
ProcessModelService.add_process_group(process_group)
basename = os.path.basename(test_process_model_id)

View File

@ -57,7 +57,7 @@ class TestLoggingService(BaseTest):
assert response.status_code == 200
log_response = client.get(
f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?detailed=true",
headers=headers,
)
assert log_response.status_code == 200

View File

@ -232,7 +232,7 @@ class TestProcessApi(BaseTest):
"process_model_display_name",
"start_in_seconds",
"end_in_seconds",
"username",
"process_initiator_username",
"status",
"summary",
"description",
@ -860,7 +860,7 @@ class TestProcessApi(BaseTest):
assert response.status_code == 200
assert response.json is not None
assert response.json["ok"]
assert response.json["file_contents"] is not None
response = client.get(
f"/v1.0/process-models/{modified_process_model_id}/files/random_fact.svg",
@ -1296,16 +1296,16 @@ class TestProcessApi(BaseTest):
xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
def test_message_start_when_starting_process_instance(
def test_message_send_when_starting_process_instance(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_message_start_when_starting_process_instance."""
"""Test_message_send_when_starting_process_instance."""
# ensure process model is loaded
process_group_id = "test_message_start"
process_group_id = "test_message_send"
process_model_id = "message_receiver"
bpmn_file_name = "message_receiver.bpmn"
bpmn_file_location = "message_send_one_conversation"
@ -1345,15 +1345,15 @@ class TestProcessApi(BaseTest):
assert process_instance_data
assert process_instance_data["the_payload"] == payload
def test_message_start_when_providing_message_to_running_process_instance(
def test_message_send_when_providing_message_to_running_process_instance(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_message_start_when_providing_message_to_running_process_instance."""
process_group_id = "test_message_start"
"""Test_message_send_when_providing_message_to_running_process_instance."""
process_group_id = "test_message_send"
process_model_id = "message_sender"
bpmn_file_name = "message_sender.bpmn"
bpmn_file_location = "message_send_one_conversation"
@ -1412,6 +1412,105 @@ class TestProcessApi(BaseTest):
assert process_instance_data
assert process_instance_data["the_payload"] == payload
def test_message_send_errors_when_providing_message_to_suspended_process_instance(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_message_send_when_providing_message_to_running_process_instance."""
process_group_id = "test_message_send"
process_model_id = "message_sender"
bpmn_file_name = "message_sender.bpmn"
bpmn_file_location = "message_send_one_conversation"
process_model_identifier = self.create_group_and_model_with_bpmn(
client,
with_super_admin_user,
process_group_id=process_group_id,
process_model_id=process_model_id,
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
message_model_identifier = "message_response"
payload = {
"the_payload": {
"topica": "the_payload.topica_string",
"topicb": "the_payload.topicb_string",
"andThis": "another_item_non_key",
}
}
response = self.create_process_instance_from_process_model_id_with_api(
client,
process_model_identifier,
self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
process_instance = ProcessInstanceModel.query.filter_by(
id=process_instance_id
).first()
processor = ProcessInstanceProcessor(process_instance)
processor.suspend()
response = client.post(
f"/v1.0/messages/{message_model_identifier}",
content_type="application/json",
headers=self.logged_in_headers(with_super_admin_user),
data=json.dumps(
{"payload": payload, "process_instance_id": process_instance_id}
),
)
assert response.status_code == 400
assert response.json
assert response.json["error_code"] == "process_instance_is_suspended"
processor.resume()
response = client.post(
f"/v1.0/messages/{message_model_identifier}",
content_type="application/json",
headers=self.logged_in_headers(with_super_admin_user),
data=json.dumps(
{"payload": payload, "process_instance_id": process_instance_id}
),
)
assert response.status_code == 200
json_data = response.json
assert json_data
assert json_data["status"] == "complete"
process_instance_id = json_data["id"]
process_instance = ProcessInstanceModel.query.filter_by(
id=process_instance_id
).first()
assert process_instance
processor = ProcessInstanceProcessor(process_instance)
process_instance_data = processor.get_data()
assert process_instance_data
assert process_instance_data["the_payload"] == payload
processor.terminate()
response = client.post(
f"/v1.0/messages/{message_model_identifier}",
content_type="application/json",
headers=self.logged_in_headers(with_super_admin_user),
data=json.dumps(
{"payload": payload, "process_instance_id": process_instance_id}
),
)
assert response.status_code == 400
assert response.json
assert response.json["error_code"] == "process_instance_is_terminated"
def test_process_instance_can_be_terminated(
self,
app: Flask,
@@ -1419,9 +1518,9 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_message_start_when_providing_message_to_running_process_instance."""
"""Test_message_send_when_providing_message_to_running_process_instance."""
# this task will wait on a catch event
process_group_id = "test_message_start"
process_group_id = "test_message_send"
process_model_id = "message_sender"
bpmn_file_name = "message_sender.bpmn"
bpmn_file_location = "message_send_one_conversation"
@@ -2061,59 +2160,10 @@ class TestProcessApi(BaseTest):
assert process is not None
assert process.status == "suspended"
-    def test_error_handler_with_email(
-        self,
-        app: Flask,
-        client: FlaskClient,
-        with_db_and_bpmn_file_cleanup: None,
-        with_super_admin_user: UserModel,
-    ) -> None:
-        """Test_error_handler."""
-        process_group_id = "data"
-        process_model_id = "error"
-        bpmn_file_name = "error.bpmn"
-        bpmn_file_location = "error"
-        process_model_identifier = self.create_group_and_model_with_bpmn(
-            client,
-            with_super_admin_user,
-            process_group_id=process_group_id,
-            process_model_id=process_model_id,
-            bpmn_file_name=bpmn_file_name,
-            bpmn_file_location=bpmn_file_location,
-        )
-        process_instance_id = self.setup_testing_instance(
-            client, process_model_identifier, with_super_admin_user
-        )
-        process_model = ProcessModelService.get_process_model(process_model_identifier)
-        ProcessModelService.update_process_model(
-            process_model,
-            {"exception_notification_addresses": ["with_super_admin_user@example.com"]},
-        )
-        mail = app.config["MAIL_APP"]
-        with mail.record_messages() as outbox:
-            response = client.post(
-                f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
-                headers=self.logged_in_headers(with_super_admin_user),
-            )
-            assert response.status_code == 400
-            assert len(outbox) == 1
-            message = outbox[0]
-            assert message.subject == "Unexpected error in app"
-            assert (
-                message.body == 'TypeError:can only concatenate str (not "int") to str'
-            )
-            assert message.recipients == process_model.exception_notification_addresses
-        process = (
-            db.session.query(ProcessInstanceModel)
-            .filter(ProcessInstanceModel.id == process_instance_id)
-            .first()
-        )
-        assert process is not None
-        assert process.status == "error"
def test_error_handler_system_notification(self) -> None:
"""Test_error_handler_system_notification."""
# TODO: make sure the system notification process is run on exceptions
...
def test_task_data_is_set_even_if_process_instance_errors(
self,
@@ -2188,7 +2238,7 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_message_instances_by_process_instance_id."""
process_group_id = "test_message_start"
process_group_id = "test_message_send"
process_model_id = "message_receiver"
bpmn_file_name = "message_receiver.bpmn"
bpmn_file_location = "message_send_one_conversation"
@@ -3046,6 +3096,101 @@ class TestProcessApi(BaseTest):
assert response.json["pagination"]["pages"] == 1
assert response.json["pagination"]["total"] == 1
def test_can_get_process_instance_list_with_report_metadata_and_process_initiator(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_process_instance_list_with_report_metadata_and_process_initator."""
user_one = self.create_user_with_permission(username="user_one")
process_model = load_test_spec(
process_model_id=(
"save_process_instance_metadata/save_process_instance_metadata"
),
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
self.create_process_instance_from_process_model(
process_model=process_model, user=user_one
)
self.create_process_instance_from_process_model(
process_model=process_model, user=user_one
)
self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
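        # this report filters on a process initiator username that does not exist ("DNE"), so it should match nothing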
dne_report_metadata = {
"columns": [
{"Header": "ID", "accessor": "id"},
{"Header": "Status", "accessor": "status"},
{"Header": "Process Initiator", "accessor": "username"},
],
"order_by": ["status"],
"filter_by": [
{
"field_name": "process_initiator_username",
"field_value": "DNE",
"operator": "equals",
}
],
}
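        # this report filters down to instances started by user_one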
user_one_report_metadata = {
"columns": [
{"Header": "ID", "accessor": "id"},
{"Header": "Status", "accessor": "status"},
{"Header": "Process Initiator", "accessor": "username"},
],
"order_by": ["status"],
"filter_by": [
{
"field_name": "process_initiator_username",
"field_value": user_one.username,
"operator": "equals",
}
],
}
process_instance_report_dne = ProcessInstanceReportModel.create_with_attributes(
identifier="dne_report",
report_metadata=dne_report_metadata,
user=user_one,
)
process_instance_report_user_one = (
ProcessInstanceReportModel.create_with_attributes(
identifier="user_one_report",
report_metadata=user_one_report_metadata,
user=user_one,
)
)
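        # user_one started two instances, so the user_one report should return exactly those two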
response = client.get(
f"/v1.0/process-instances?report_identifier={process_instance_report_user_one.identifier}",
headers=self.logged_in_headers(user_one),
)
assert response.json is not None
assert response.status_code == 200
assert len(response.json["results"]) == 2
assert (
response.json["results"][0]["process_initiator_username"]
== user_one.username
)
assert (
response.json["results"][1]["process_initiator_username"]
== user_one.username
)
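        # the DNE report should return an empty result set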
response = client.get(
f"/v1.0/process-instances?report_identifier={process_instance_report_dne.identifier}",
headers=self.logged_in_headers(user_one),
)
assert response.json is not None
assert response.status_code == 200
assert len(response.json["results"]) == 0
def test_can_get_process_instance_report_column_list(
self,
app: Flask,
@@ -3088,7 +3233,11 @@ class TestProcessApi(BaseTest):
},
{"Header": "Start", "accessor": "start_in_seconds", "filterable": False},
{"Header": "End", "accessor": "end_in_seconds", "filterable": False},
{"Header": "Username", "accessor": "username", "filterable": False},
{
"Header": "Started By",
"accessor": "process_initiator_username",
"filterable": False,
},
{"Header": "Status", "accessor": "status", "filterable": False},
{"Header": "key1", "accessor": "key1", "filterable": True},
{"Header": "key2", "accessor": "key2", "filterable": True},


@@ -18,15 +18,15 @@ def test_start_dates_are_one_hour_apart(app: Flask) -> None:
)
group_identifier = os.path.dirname(process_model_identifier)
parent_group_identifier = os.path.dirname(group_identifier)
-    if not ProcessModelService.is_group(parent_group_identifier):
+    if not ProcessModelService.is_process_group(parent_group_identifier):
process_group = ProcessGroup(
id=parent_group_identifier, display_name=parent_group_identifier
)
ProcessModelService.add_process_group(process_group)
-    if not ProcessModelService.is_group(group_identifier):
+    if not ProcessModelService.is_process_group(group_identifier):
process_group = ProcessGroup(id=group_identifier, display_name=group_identifier)
ProcessModelService.add_process_group(process_group)
-    if not ProcessModelService.is_model(process_model_identifier):
+    if not ProcessModelService.is_process_model(process_model_identifier):
process_model = ProcessModelInfo(
id=process_model_identifier,
display_name=process_model_identifier,


@@ -0,0 +1,46 @@
"""Test_process_instance_processor."""
from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
class TestProcessInstanceProcessor(BaseTest):
    """TestProcessInstanceProcessor."""
def test_does_not_log_set_data_when_calling_engine_steps_on_waiting_call_activity(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_does_not_log_set_data_when_calling_engine_steps_on_waiting_call_activity."""
process_model = load_test_spec(
process_model_id="test_group/call-activity-to-human-task",
process_model_source_directory="call-activity-to-human-task",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
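        # capture how many log entries exist after the first pass of engine steps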
        process_instance_logs = SpiffLoggingModel.query.filter_by(
            process_instance_id=process_instance.id
        ).all()
        initial_length = len(process_instance_logs)
        # logs should NOT increase after running this a second time
        # since it's just waiting on a human task
        processor.do_engine_steps(save=True)
        process_instance_logs = SpiffLoggingModel.query.filter_by(
            process_instance_id=process_instance.id
        ).all()
assert len(process_instance_logs) == initial_length


@@ -1,17 +1,21 @@
"""Test_message_service."""
import os
import sys
import pytest
from flask import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore
from lxml import etree # type: ignore
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import (
ProcessModelFileInvalidError,
)
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -74,7 +78,7 @@ class TestSpecFileService(BaseTest):
bpmn_process_id_lookups[0].relative_path
== self.call_activity_nested_relative_file_path
)
-        with pytest.raises(ValidationException) as exception:
+        with pytest.raises(ProcessModelFileInvalidError) as exception:
load_test_spec(
"call_activity_nested_duplicate",
process_model_source_directory="call_activity_duplicate",
@@ -85,6 +89,14 @@ class TestSpecFileService(BaseTest):
in str(exception.value)
)
process_model = ProcessModelService.get_process_model(
"call_activity_nested_duplicate"
)
full_file_path = SpecFileService.full_file_path(
process_model, "call_activity_nested_duplicate.bpmn"
)
assert not os.path.isfile(full_file_path)
def test_updates_relative_file_path_when_appropriate(
self,
app: Flask,
@@ -206,3 +218,49 @@ class TestSpecFileService(BaseTest):
assert dmn1[0].display_name == "Decision 1"
assert dmn1[0].identifier == "Decision_0vrtcmk"
assert dmn1[0].type == "decision"
def test_validate_bpmn_xml_with_invalid_xml(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_validate_bpmn_xml_with_invalid_xml."""
process_model = load_test_spec(
process_model_id="group/invalid_xml",
bpmn_file_name="script_error_with_task_data.bpmn",
process_model_source_directory="error",
)
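        # updating a file with malformed XML should raise and should not leave the file on disk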
with pytest.raises(ProcessModelFileInvalidError):
SpecFileService.update_file(
process_model, "bad_xml.bpmn", b"THIS_IS_NOT_VALID_XML"
)
full_file_path = SpecFileService.full_file_path(process_model, "bad_xml.bpmn")
assert not os.path.isfile(full_file_path)
@pytest.mark.skipif(
sys.platform == "win32",
reason="tmp file path is not valid xml for windows and it doesn't matter",
)
def test_does_not_evaluate_entities(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_does_not_evaluate_entities."""
string_replacement = b"THIS_STRING_SHOULD_NOT_EXIST_ITS_SECRET"
tmp_file = os.path.normpath(
self.get_test_data_file_full_path("file_to_inject", "xml_with_entity")
)
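        # invoice.bpmn references this file via an external entity; if entities were
        # resolved during parsing, the file's secret contents would appear in the tree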
file_contents = self.get_test_data_file_contents(
"invoice.bpmn", "xml_with_entity"
)
file_contents = (
file_contents.decode("utf-8")
.replace("{{FULL_PATH_TO_FILE}}", tmp_file)
.encode()
)
etree_element = SpecFileService.get_etree_from_xml_bytes(file_contents)
assert string_replacement not in etree.tostring(etree_element)