commit 154dab471f
Merge remote-tracking branch 'origin/main' into frontend/use-api-subpath
@@ -10,7 +10,6 @@ set -o errtrace -o errexit -o nounset -o pipefail

 for subtree in "SpiffWorkflow" \
   "spiffworkflow-backend" \
   "spiffworkflow-frontend" \
-  "flask-bpmn" \
   "bpmn-js-spiffworkflow" \
   "connector-proxy-demo"
 do

@@ -13,10 +13,8 @@ services:
       - "${SPIFF_FRONTEND_PORT:-8001}:${SPIFF_FRONTEND_PORT:-8001}/tcp"

   spiffworkflow-backend:
-    # container_name: spiffworkflow-backend
-    build: ./spiffworkflow-backend/.
-    # dockerfile: Dockerfile
-    # image: ghcr.io/sartography/spiffworkflow-backend:latest
+    container_name: spiffworkflow-backend
+    image: ghcr.io/sartography/spiffworkflow-backend:latest
     depends_on:
       spiffworkflow-db:
         condition: service_healthy

@@ -1760,7 +1760,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "1f51db962ccaed5810f5d0f7d76a932f056430ab"
+resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"

 [[package]]
 name = "sqlalchemy"

@@ -2848,18 +2848,7 @@ psycopg2 = [
     {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
 ]
 pyasn1 = [
-    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
-    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
-    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
-    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
     {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
-    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
-    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
-    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
-    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
-    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
-    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
-    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
     {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
 ]
 pycodestyle = [

@@ -7,6 +7,14 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH"

 WORKDIR /app

+# base plus packages needed for deployment. Could just install these in final, but then we can't cache as much.
+FROM base AS deployment
+
+RUN apt-get update \
+  && apt-get clean -y \
+  && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \
+  && rm -rf /var/lib/apt/lists/*
+
 # Setup image for installing Python dependencies.
 FROM base AS setup

@@ -16,20 +24,20 @@ RUN useradd _gunicorn --no-create-home --user-group
 RUN apt-get update \
   && apt-get install -y -q gcc libssl-dev libpq-dev

 # poetry install takes a long time and can be cached if dependencies don't change,
 # so that's why we tolerate running it twice.
 COPY pyproject.toml poetry.lock /app/
 RUN poetry install --without dev

 COPY . /app
 RUN poetry install --without dev

 # Final image without setup dependencies.
-FROM base AS final
+FROM deployment AS final

+LABEL source="https://github.com/sartography/spiff-arena"
+LABEL description="Software development platform for building, running, and monitoring executable diagrams"

-RUN apt-get update \
-  && apt-get clean -y \
-  && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \
-  && rm -rf /var/lib/apt/lists/*

 COPY --from=setup /app /app

-ENTRYPOINT ["./bin/boot_server_in_docker"]
+CMD ["./bin/boot_server_in_docker"]

@@ -22,8 +22,8 @@ set -o errtrace -o errexit -o nounset -o pipefail

 # KEYCLOAK_BASE_URL=http://localhost:7002
 KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org
-BACKEND_BASE_URL=http://localhost:7000
-# BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
+# BACKEND_BASE_URL=http://localhost:7000
+BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
 REALM_NAME=spiffworkflow
 USERNAME=${1-fin}
 PASSWORD=${2-fin}

@@ -61,6 +61,7 @@ result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
   -d "client_id=$BACKEND_CLIENT_ID" \
 )
 backend_token=$(jq -r '.access_token' <<< "$result")
+echo "testing hitting backend with token: $backend_token"
 curl --fail -v "${BACKEND_BASE_URL}/v1.0/process-groups?per_page=1" -H "Authorization: Bearer $backend_token"

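For reference, a minimal Python sketch of the same password-grant token fetch (the endpoint path follows recent Keycloak versions, which drop the older /auth prefix, and the client id is an assumption standing in for the script's BACKEND_CLIENT_ID):

    import requests

    KEYCLOAK_BASE_URL = "https://keycloak.dev.spiffworkflow.org"
    REALM_NAME = "spiffworkflow"

    def get_backend_token(username: str, password: str, client_id: str) -> str:
        # Password-grant request, mirroring the curl call in the script above.
        url = f"{KEYCLOAK_BASE_URL}/realms/{REALM_NAME}/protocol/openid-connect/token"
        response = requests.post(
            url,
            data={
                "grant_type": "password",
                "username": username,
                "password": password,
                "client_id": client_id,
            },
        )
        response.raise_for_status()
        return response.json()["access_token"]
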
@@ -7,7 +7,13 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail

 # you can get a list of users from the keycloak realm file like:
 # grep '"email" :' keycloak/realm_exports/spiffworkflow-realm.json | awk -F : '{print $2}' | sed -E 's/ "//g' | sed -E 's/",//g' > s

+# we keep some of these in keycloak/test_user_lists
+# spiffworkflow-realm.json is a mashup of the status and sartography user lists.
 user_file_with_one_email_per_line="${1:-}"

+keycloak_realm="${2:-spiffworkflow}"
 if [[ -z "${1:-}" ]]; then
   >&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]"

@@ -38,14 +44,66 @@ result=$(curl --fail -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
 )
 backend_token=$(jq -r '.access_token' <<< "$result")

-while read -r user_email; do
-  if [[ -n "$user_email" ]]; then
-    username=$(awk -F '@' '{print $1}' <<<"$user_email")
-    credentials='{"type":"password","value":"'"${username}"'","temporary":false}'
+function add_user() {
+  local user_email=$1
+  local username=$2
+  local user_attribute_one=$3

-    curl --fail --location --request POST "http://localhost:7002/admin/realms/${keycloak_realm}/users" \
-      -H 'Content-Type: application/json' \
-      -H "Authorization: Bearer $backend_token" \
-      --data-raw '{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']}'
+  local credentials='{"type":"password","value":"'"${username}"'","temporary":false}'
+
+  local data='{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']'
+  if [[ -n "$user_attribute_one" ]]; then
+    data=''${data}', "attributes": {"'${custom_attribute_one}'": [ "'$user_attribute_one'" ]}'
+  fi
+  data="${data}}"
+
+  local http_code
+  http_code=$(curl --silent -o /dev/null -w '%{http_code}' --location --request POST "http://localhost:7002/admin/realms/${keycloak_realm}/users" \
+    -H 'Content-Type: application/json' \
+    -H "Authorization: Bearer $backend_token" \
+    --data-raw "$data")
+  echo "$http_code"
+}
+
+first_line_processed="false"
+custom_attribute_one=''
+
+while read -r input_line; do
+  if ! grep -qE '^#' <<<"$input_line" ; then
+    if [[ "$first_line_processed" == "false" ]]; then
+      email_header=$(awk -F ',' '{print $1}' <<<"$input_line")
+      if [[ "$email_header" != "email" ]]; then
+        >&2 echo "ERROR: the first column in the first row must be email."
+        exit 1
+      fi
+      custom_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
+      first_line_processed="true"
+    elif [[ -n "$input_line" ]]; then
+      user_email=$(awk -F ',' '{print $1}' <<<"$input_line")
+      username=$(awk -F '@' '{print $1}' <<<"$user_email")
+      user_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
+      http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+
+      if [[ "$http_code" == "409" ]]; then
+        user_info=$(curl --fail --silent --location --request GET "http://localhost:7002/admin/realms/${keycloak_realm}/users?username=${username}&exact=true" \
+          -H 'Content-Type: application/json' \
+          -H "Authorization: Bearer $backend_token")
+
+        user_id=$(jq -r '.[0] | .id' <<<"$user_info")
+        if [[ -z "$user_id" ]]; then
+          >&2 echo "ERROR: Could not find user_id for user: ${user_email}"
+          exit 1
+        fi
+        curl --fail --location --silent --request DELETE "http://localhost:7002/admin/realms/${keycloak_realm}/users/${user_id}" \
+          -H 'Content-Type: application/json' \
+          -H "Authorization: Bearer $backend_token"
+
+        http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+        if [[ "$http_code" != "201" ]]; then
+          >&2 echo "ERROR: Failed to recreate user: ${user_email} with http_code: ${http_code}"
+          exit 1
+        fi
+      fi
+    fi
+  fi
 done <"$user_file_with_one_email_per_line"

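The add_user flow above, sketched in Python for readability (requests is illustrative; the endpoint and payload shapes mirror the script, and a 409 means the user already exists, in which case the script deletes and recreates them):

    import requests

    def add_user(backend_token: str, keycloak_realm: str, user_email: str,
                 username: str, user_attribute_one: str, custom_attribute_one: str) -> int:
        # Build the same JSON body the bash function assembles by string concatenation.
        data = {
            "email": user_email,
            "enabled": "true",
            "username": username,
            "credentials": [
                {"type": "password", "value": username, "temporary": False}
            ],
        }
        if user_attribute_one:
            data["attributes"] = {custom_attribute_one: [user_attribute_one]}
        response = requests.post(
            f"http://localhost:7002/admin/realms/{keycloak_realm}/users",
            json=data,
            headers={"Authorization": f"Bearer {backend_token}"},
        )
        return response.status_code  # 201 on create, 409 if the user already exists
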
@@ -21,6 +21,9 @@ docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_pa
 docker cp "keycloak:${docker_container_path}" "$local_tmp_dir"

 for realm in $realms ; do
+  if ! grep -Eq '\-realm$' <<< "$realm"; then
+    realm="${realm}-realm"
+  fi
   cp "${local_tmp_dir}/hey/${realm}.json" "${script_dir}/../realm_exports/"
 done

@@ -547,7 +547,7 @@
 "enabled" : true,
 "totp" : false,
 "emailVerified" : false,
-"email" : "kevin@sartography.com",
+"email" : "kb@sartography.com",
 "credentials" : [ {
   "id" : "4057e784-689d-47c0-a164-035a69e78edf",
   "type" : "password",

(File diff suppressed because it is too large)

@@ -1,8 +1,14 @@
-alex@sartography.com
-dan@sartography.com
-kevin@sartography.com
-jason@sartography.com
-mike@sartography.com
+email,spiffworkflow-employeeid
+admin@spiffworkflow.org
+alex@sartography.com,111
+dan@sartography.com,115
+daniel@sartography.com
+elizabeth@sartography.com
+j@sartography.com
+jason@sartography.com
+jon@sartography.com
+kb@sartography.com
+kevin@sartography.com
+madhurya@sartography.com
+mike@sartography.com
+natalia@sartography.com

@@ -1,17 +1,52 @@
-finance.lead@status.im
-legal.lead@status.im
-program.lead@status.im
-services.lead@status.im
-finance.sme@status.im
-infra.sme@status.im
-legal.sme@status.im
-security.sme@status.im
-ppg.ba@status.im
-peopleops.partner@status.im
-peopleops.talent@status.im
+email,spiffworkflow-employeeid
+admin@spiffworkflow.org
+amir@status.im
+app.program.lead@status.im
+core@status.im,113
+dao.project.lead@status.im
+desktop.program.lead@status.im
+desktop.project.lead@status.im
+fin1@status.im
+fin@status.im,118
+finance.lead@status.im,1182
+finance.lead@status.im,1289
+finance_user1@status.im
+harmeet@status.im,109
+infra.program-lead@status.im
+infra.project-lead@status.im
-dao.project.lead@status.im
-desktop.project.lead@status.im
-app.program.lead@status.im
-desktop.program.lead@status.im
+infra.sme@status.im,1202
+infra1.sme@status.im
+infra2.sme@status.im
+jakub@status.im
+jarrad@status.im
+lead1@status.im
+lead@status.im,1140
+legal.lead@status.im,1243
+legal.program-lead.sme@status.im
+legal.program-lead@status.im
+legal.project-lead.sme@status.im
+legal.project-lead@status.im
+legal.sme1@status.im,1345
+legal.sme@status.im,1253
+legal1.sme@status.im
+manuchehr@status.im,110
+peopleops.partner@status.im
+peopleops.talent.program-lead@status.im
+peopleops.talent.project-lead@status.im
+peopleops.talent.sme@status.im
+peopleops.talent1.sme@status.im
+peopleops.talent@status.im,141
+ppg.ba.program-lead@status.im
+ppg.ba.project-lead@status.im
+ppg.ba.sme1@status.im,1398
+ppg.ba.sme@status.im,1387
+ppg.ba@status.im,1276
+program.lead@status.im,1211
+sasha@status.im,112
+security.program-lead.sme@status.im
+security.program-lead@status.im
+security.project-lead.sme@status.im
+security.project-lead@status.im
+security.sme@status.im,1230
+security1.sme@status.im
+services.lead@status.im

@ -0,0 +1,32 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: ca9b79dde5cc
|
||||
Revises: 2ec4222f0012
|
||||
Create Date: 2023-02-03 21:06:56.396816
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ca9b79dde5cc'
|
||||
down_revision = '2ec4222f0012'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('user', sa.Column('tenant_specific_field_1', sa.String(length=255), nullable=True))
|
||||
op.add_column('user', sa.Column('tenant_specific_field_2', sa.String(length=255), nullable=True))
|
||||
op.add_column('user', sa.Column('tenant_specific_field_3', sa.String(length=255), nullable=True))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('user', 'tenant_specific_field_3')
|
||||
op.drop_column('user', 'tenant_specific_field_2')
|
||||
op.drop_column('user', 'tenant_specific_field_1')
|
||||
# ### end Alembic commands ###
|
|
@@ -1825,7 +1825,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331"
+resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"

 [[package]]
 name = "SQLAlchemy"

@@ -2546,6 +2546,7 @@ greenlet = [
 {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
 {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
 {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
 {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
 {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
 {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
 {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},

@@ -2554,6 +2555,7 @@ greenlet = [
 {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
 {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
 {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
 {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
 {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
 {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
 {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},

@@ -2562,6 +2564,7 @@ greenlet = [
 {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
 {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
 {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
 {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
 {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
 {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
 {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},

@@ -2863,10 +2866,7 @@ orjson = [
 {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
 {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
 {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
 {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
 {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
 {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
 {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
 {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
 {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
 {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},

@@ -1,5 +1,6 @@
 """__init__."""
 import os
+import sys
 from typing import Any

 import connexion  # type: ignore

@@ -157,6 +158,29 @@ def get_hacked_up_app_for_script() -> flask.app.Flask:
     return app


+def traces_sampler(sampling_context: Any) -> Any:
+    # always inherit
+    if sampling_context["parent_sampled"] is not None:
+        return sampling_context["parent_sampled"]
+
+    if "wsgi_environ" in sampling_context:
+        wsgi_environ = sampling_context["wsgi_environ"]
+        path_info = wsgi_environ.get("PATH_INFO")
+        request_method = wsgi_environ.get("REQUEST_METHOD")
+
+        # tasks_controller.task_submit
+        # this is the current pain point as of 31 jan 2023.
+        if (
+            path_info
+            and path_info.startswith("/v1.0/tasks/")
+            and request_method == "PUT"
+        ):
+            return 1
+
+    # Default sample rate for all others (replaces traces_sample_rate)
+    return 0.01
+
+
 def configure_sentry(app: flask.app.Flask) -> None:
     """Configure_sentry."""
     import sentry_sdk

@@ -180,6 +204,9 @@ def configure_sentry(app: flask.app.Flask) -> None:
     if sentry_traces_sample_rate is None:
         raise Exception("SENTRY_TRACES_SAMPLE_RATE is not set somehow")

+    # profiling doesn't work on windows, because of an issue like https://github.com/nvdv/vprof/issues/62
+    profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
+
     sentry_sdk.init(
         dsn=app.config.get("SENTRY_DSN"),
         integrations=[

@@ -193,5 +220,8 @@ def configure_sentry(app: flask.app.Flask) -> None:
         # of transactions for performance monitoring.
         # We recommend adjusting this value to less than 1(00%) in production.
-        traces_sample_rate=float(sentry_traces_sample_rate),
+        traces_sampler=traces_sampler,
+        # The profiles_sample_rate setting is relative to the traces_sample_rate setting.
+        _experiments={"profiles_sample_rate": profiles_sample_rate},
         before_send=before_send,
     )

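A quick sanity check of the sampler's two branches, using hand-built sampling contexts shaped like the ones the sentry SDK passes in (the dict keys match exactly what traces_sampler reads above):

    submit_context = {
        "parent_sampled": None,
        "wsgi_environ": {"PATH_INFO": "/v1.0/tasks/123", "REQUEST_METHOD": "PUT"},
    }
    other_context = {
        "parent_sampled": None,
        "wsgi_environ": {"PATH_INFO": "/v1.0/process-groups", "REQUEST_METHOD": "GET"},
    }
    assert traces_sampler(submit_context) == 1      # task submits are always traced
    assert traces_sampler(other_context) == 0.01    # everything else samples at 1%
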
@@ -1605,6 +1605,45 @@ paths:
           schema:
             $ref: "#/components/schemas/Workflow"

+  /process-data-file-download/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: process_data_identifier
+        in: path
+        required: true
+        description: The identifier of the process data.
+        schema:
+          type: string
+      - name: index
+        in: query
+        required: false
+        description: The optional index of the value if the key's value is an array
+        schema:
+          type: integer
+    get:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_file_download
+      summary: Download the file referenced in the process data value.
+      tags:
+        - Data Objects
+      responses:
+        "200":
+          description: Fetch succeeded.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
   /send-event/{modified_process_model_identifier}/{process_instance_id}:
     parameters:
       - name: modified_process_model_identifier

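A hypothetical client call against the new endpoint (the identifiers and token are made up; modified process model identifiers use colons in place of slashes):

    import requests

    token = "..."  # a bearer token obtained from the authentication flow
    url = (
        "https://api.dev.spiffworkflow.org/v1.0/process-data-file-download/"
        "misc:category:my-model/42/my_file_data"
    )
    response = requests.get(
        url, params={"index": 0}, headers={"Authorization": f"Bearer {token}"}
    )
    response.raise_for_status()
    with open("downloaded_file", "wb") as f:
        f.write(response.content)
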
@@ -51,6 +51,19 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
         ) from exception


+def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None:
+    tenant_specific_fields = app.config.get("TENANT_SPECIFIC_FIELDS")
+
+    if tenant_specific_fields is None or tenant_specific_fields == "":
+        app.config["TENANT_SPECIFIC_FIELDS"] = []
+    else:
+        app.config["TENANT_SPECIFIC_FIELDS"] = tenant_specific_fields.split(",")
+        if len(app.config["TENANT_SPECIFIC_FIELDS"]) > 3:
+            raise ConfigurationError(
+                "TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields"
+            )
+
+
 def setup_config(app: Flask) -> None:
     """Setup_config."""
     # ensure the instance folder exists

@@ -94,8 +107,6 @@ def setup_config(app: Flask) -> None:
     else:
         print("base_permissions: no permissions file loaded")

     # unversioned (see .gitignore) config that can override everything and include secrets.
     # src/spiffworkflow_backend/config/secrets.py
     app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True)

@@ -110,3 +121,4 @@ def setup_config(app: Flask) -> None:

     thread_local_data = threading.local()
     app.config["THREAD_LOCAL_DATA"] = thread_local_data
+    _set_up_tenant_specific_fields_as_list_of_strings(app)

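The parsing rule restated as a standalone sketch (behavior matches _set_up_tenant_specific_fields_as_list_of_strings above, with a plain ValueError standing in for ConfigurationError):

    def parse_tenant_specific_fields(value):
        if value is None or value == "":
            return []
        fields = value.split(",")
        if len(fields) > 3:
            raise ValueError("TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields")
        return fields

    assert parse_tenant_specific_fields(None) == []
    assert parse_tenant_specific_fields("spiffworkflow-employeeid") == ["spiffworkflow-employeeid"]
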
@@ -72,7 +72,7 @@ GIT_SSH_PRIVATE_KEY = environ.get("GIT_SSH_PRIVATE_KEY")
 GIT_USERNAME = environ.get("GIT_USERNAME")
 GIT_USER_EMAIL = environ.get("GIT_USER_EMAIL")

-# Datbase Configuration
+# Database Configuration
 SPIFF_DATABASE_TYPE = environ.get(
     "SPIFF_DATABASE_TYPE", default="mysql"
 )  # can also be sqlite, postgres

@@ -88,3 +88,8 @@ SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get(
 ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int(
     environ.get("ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600")
 )

+# Tenant specific fields is a comma separated list of field names that we will convert to list of strings
+# and store in the user table's tenant_specific_field_n columns. You can have up to three items in this
+# comma-separated list.
+TENANT_SPECIFIC_FIELDS = environ.get("TENANT_SPECIFIC_FIELDS")

@@ -15,11 +15,17 @@ from flask import jsonify
 from flask import make_response
 from sentry_sdk import capture_exception
 from sentry_sdk import set_tag
-from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
+from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
+from SpiffWorkflow.exceptions import WorkflowException
+from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.specs.base import TaskSpec  # type: ignore
 from SpiffWorkflow.task import Task  # type: ignore

+from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
+from spiffworkflow_backend.services.authentication_service import TokenInvalidError
+from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
+from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
+

 api_error_blueprint = Blueprint("api_error_blueprint", __name__)

@@ -131,7 +137,7 @@ class ApiError(Exception):
         cls,
         error_code: str,
         message: str,
-        exp: WorkflowException,
+        exp: SpiffWorkflowException,
     ) -> ApiError:
         """Deals with workflow exceptions.

@@ -140,6 +146,7 @@ class ApiError(Exception):
         we can with the data we have.
         """
         if isinstance(exp, WorkflowTaskException):
+            # Note that WorkflowDataExceptions are also WorkflowTaskExceptions
             return ApiError.from_task(
                 error_code,
                 message,

@@ -150,9 +157,10 @@ class ApiError(Exception):
                 error_line=exp.error_line,
                 task_trace=exp.task_trace,
             )

-        else:
+        elif isinstance(exp, WorkflowException):
             return ApiError.from_task_spec(error_code, message, exp.task_spec)
+        else:
+            return ApiError("workflow_error", str(exp))


 def set_user_sentry_context() -> None:

@@ -166,13 +174,30 @@ def set_user_sentry_context() -> None:
     set_tag("username", username)


+def should_notify_sentry(exception: Exception) -> bool:
+    """Determine if we should notify sentry.
+
+    We want to capture_exception to log the exception to sentry, but we don't want to log:
+      1. ApiErrors that are just invalid tokens
+      2. NotAuthorizedError. we usually call check-permissions before calling an API to
+         make sure we'll have access, but there are some cases
+         where it's more convenient to just make the call from the frontend and handle the 403 appropriately.
+    """
+    if isinstance(exception, ApiError):
+        if exception.error_code == "invalid_token":
+            return False
+    if isinstance(exception, NotAuthorizedError):
+        return False
+    return True
+
+
 @api_error_blueprint.app_errorhandler(Exception)  # type: ignore
 def handle_exception(exception: Exception) -> flask.wrappers.Response:
     """Handles unexpected exceptions."""
     set_user_sentry_context()

     sentry_link = None
-    if not isinstance(exception, ApiError) or exception.error_code != "invalid_token":
+    if should_notify_sentry(exception):
         id = capture_exception(exception)

         if isinstance(exception, ApiError):

@@ -188,22 +213,45 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
             f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
         )

-    # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
-    # seems to break the sentry sdk context where we no longer get back
-    # an event id or send out tags like username
-    current_app.logger.exception(exception)
+        # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
+        # seems to break the sentry sdk context where we no longer get back
+        # an event id or send out tags like username
+        current_app.logger.exception(exception)
+    else:
+        current_app.logger.warning(
+            f"Received exception: {exception}. Since we do not want this particular"
+            " exception in sentry, we cannot use logger.exception or logger.error, so"
+            " there will be no backtrace. see api_error.py"
+        )

+    error_code = "internal_server_error"
+    status_code = 500
+    if (
+        isinstance(exception, NotAuthorizedError)
+        or isinstance(exception, TokenNotProvidedError)
+        or isinstance(exception, TokenInvalidError)
+    ):
+        error_code = "not_authorized"
+        status_code = 403
+    if isinstance(exception, UserNotLoggedInError):
+        error_code = "not_authenticated"
+        status_code = 401

     # set api_exception like this to avoid confusing mypy
-    # and what type the object is
+    # about what type the object is
     api_exception = None
     if isinstance(exception, ApiError):
         api_exception = exception
+    elif isinstance(exception, SpiffWorkflowException):
+        api_exception = ApiError.from_workflow_exception(
+            "unexpected_workflow_exception", "Unexpected Workflow Error", exception
+        )
     else:
         api_exception = ApiError(
-            error_code="internal_server_error",
+            error_code=error_code,
             message=f"{exception.__class__.__name__}",
             sentry_link=sentry_link,
-            status_code=500,
+            status_code=status_code,
         )

     return make_response(jsonify(api_exception), api_exception.status_code)

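The new error-to-status mapping in handle_exception, condensed (the class names come from the authentication_service imports above):

    def status_for(exception: Exception) -> int:
        # 403 for authorization and token problems, 401 when no one is logged in,
        # 500 for everything else.
        if isinstance(
            exception, (NotAuthorizedError, TokenNotProvidedError, TokenInvalidError)
        ):
            return 403
        if isinstance(exception, UserNotLoggedInError):
            return 401
        return 500
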
@@ -115,8 +115,8 @@ class Task:
         process_model_display_name: Union[str, None] = None,
         process_group_identifier: Union[str, None] = None,
         process_model_identifier: Union[str, None] = None,
-        form_schema: Union[str, None] = None,
-        form_ui_schema: Union[str, None] = None,
+        form_schema: Union[dict, None] = None,
+        form_ui_schema: Union[dict, None] = None,
         parent: Optional[str] = None,
         event_definition: Union[dict[str, Any], None] = None,
         call_activity_process_identifier: Optional[str] = None,

@@ -34,6 +34,9 @@ class UserModel(SpiffworkflowBaseDBModel):
     service_id = db.Column(db.String(255), nullable=False, unique=False)
     display_name = db.Column(db.String(255))
     email = db.Column(db.String(255))
+    tenant_specific_field_1: str | None = db.Column(db.String(255))
+    tenant_specific_field_2: str | None = db.Column(db.String(255))
+    tenant_specific_field_3: str | None = db.Column(db.String(255))
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)

@@ -1,7 +1,9 @@
 """APIs for dealing with process groups, process models, and process instances."""
+import base64
 import json
 from typing import Any
 from typing import Dict
+from typing import Optional

 import flask.wrappers
 from flask import Blueprint

@@ -81,10 +83,12 @@ def process_list() -> Any:
     return SpecReferenceSchema(many=True).dump(references)


-def process_data_show(
+def _process_data_fetcher(
     process_instance_id: int,
     process_data_identifier: str,
     modified_process_model_identifier: str,
+    download_file_data: bool,
+    index: Optional[int] = None,
 ) -> flask.wrappers.Response:
     """Process_data_show."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

@@ -94,6 +98,26 @@ def _process_data_fetcher(
     if process_data_identifier in all_process_data:
         process_data_value = all_process_data[process_data_identifier]

+    if process_data_value is not None and index is not None:
+        process_data_value = process_data_value[index]
+
+    if (
+        download_file_data
+        and isinstance(process_data_value, str)
+        and process_data_value.startswith("data:")
+    ):
+        parts = process_data_value.split(";")
+        mimetype = parts[0][4:]
+        filename = parts[1]
+        base64_value = parts[2].split(",")[1]
+        file_contents = base64.b64decode(base64_value)
+
+        return Response(
+            file_contents,
+            mimetype=mimetype,
+            headers={"Content-disposition": f"attachment; filename={filename}"},
+        )
+
     return make_response(
         jsonify(
             {

@@ -105,6 +129,37 @@ def _process_data_fetcher(
     )


+def process_data_show(
+    process_instance_id: int,
+    process_data_identifier: str,
+    modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+    """Process_data_show."""
+    return _process_data_fetcher(
+        process_instance_id,
+        process_data_identifier,
+        modified_process_model_identifier,
+        False,
+        None,
+    )
+
+
+def process_data_file_download(
+    process_instance_id: int,
+    process_data_identifier: str,
+    modified_process_model_identifier: str,
+    index: Optional[int] = None,
+) -> flask.wrappers.Response:
+    """Process_data_file_download."""
+    return _process_data_fetcher(
+        process_instance_id,
+        process_data_identifier,
+        modified_process_model_identifier,
+        True,
+        index,
+    )
+
+
 # sample body:
 # {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
 # "full_name": "sartography/sample-process-models", "private": False .... }}

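The data-URL unpacking above, as a standalone snippet (the sample value follows the "data:<mimetype>;name=<filename>;base64,<payload>" shape this code expects; note that filename keeps its "name=" prefix, exactly as in _process_data_fetcher):

    import base64

    process_data_value = "data:application/pdf;name=report.pdf;base64,JVBERi0xLjQ="
    parts = process_data_value.split(";")
    mimetype = parts[0][4:]          # "application/pdf"
    filename = parts[1]              # "name=report.pdf"
    file_contents = base64.b64decode(parts[2].split(",")[1])
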
@@ -2,6 +2,7 @@
 import json
 import os
+import uuid
 from sys import exc_info
 from typing import Any
 from typing import Dict
 from typing import Optional

@@ -10,6 +11,7 @@ from typing import Union

 import flask.wrappers
 import jinja2
+import sentry_sdk
 from flask import current_app
 from flask import g
 from flask import jsonify

@@ -169,6 +171,25 @@ def task_list_for_my_groups(
     )


+def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
+    if task.form_ui_schema is None:
+        task.form_ui_schema = {}
+
+    if task.data and "form_ui_hidden_fields" in task.data:
+        hidden_fields = task.data["form_ui_hidden_fields"]
+        for hidden_field in hidden_fields:
+            hidden_field_parts = hidden_field.split(".")
+            relevant_depth_of_ui_schema = task.form_ui_schema
+            for ii, hidden_field_part in enumerate(hidden_field_parts):
+                if hidden_field_part not in relevant_depth_of_ui_schema:
+                    relevant_depth_of_ui_schema[hidden_field_part] = {}
+                relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[
+                    hidden_field_part
+                ]
+                if len(hidden_field_parts) == ii + 1:
+                    relevant_depth_of_ui_schema["ui:widget"] = "hidden"
+
+
 def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
     """Task_show."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

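What the munging does to a ui schema, for a task whose data names one nested hidden field (example values are made up):

    task.data = {"form_ui_hidden_fields": ["payment.account_number"]}
    task.form_ui_schema = {}
    _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task)
    # The dotted path is expanded into nested dicts, with the leaf marked hidden:
    assert task.form_ui_schema == {
        "payment": {"account_number": {"ui:widget": "hidden"}}
    }
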
@@ -184,20 +205,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
         process_instance.process_model_identifier,
     )

-    human_task = HumanTaskModel.query.filter_by(
-        process_instance_id=process_instance_id, task_id=task_id
-    ).first()
-    if human_task is None:
-        raise (
-            ApiError(
-                error_code="no_human_task",
-                message=(
-                    f"Cannot find a task to complete for task id '{task_id}' and"
-                    f" process instance {process_instance_id}."
-                ),
-                status_code=500,
-            )
-        )
+    _find_human_task_or_raise(process_instance_id, task_id)

     form_schema_file_name = ""
     form_ui_schema_file_name = ""

@@ -252,31 +260,16 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
             )
         )

-    form_contents = _prepare_form_data(
+    form_dict = _prepare_form_data(
         form_schema_file_name,
         spiff_task,
         process_model_with_form,
     )

-    try:
-        # form_contents is a str
-        form_dict = json.loads(form_contents)
-    except Exception as exception:
-        raise (
-            ApiError(
-                error_code="error_loading_form",
-                message=(
-                    f"Could not load form schema from: {form_schema_file_name}."
-                    f" Error was: {str(exception)}"
-                ),
-                status_code=400,
-            )
-        ) from exception

     if task.data:
-        _update_form_schema_with_task_data_as_needed(form_dict, task)
+        _update_form_schema_with_task_data_as_needed(form_dict, task, spiff_task)

-    if form_contents:
+    if form_dict:
         task.form_schema = form_dict

     if form_ui_schema_file_name:

@@ -288,6 +281,8 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
     if ui_form_contents:
         task.form_ui_schema = ui_form_contents

+    _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task)
+
     if task.properties and task.data and "instructionsForEndUser" in task.properties:
         if task.properties["instructionsForEndUser"]:
             try:

@@ -326,13 +321,12 @@ def process_data_show(
     )


-def task_submit(
+def task_submit_shared(
     process_instance_id: int,
     task_id: str,
     body: Dict[str, Any],
     terminate_loop: bool = False,
 ) -> flask.wrappers.Response:
     """Task_submit_user_data."""
     principal = _find_principal_or_raise()
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     if not process_instance.can_submit_task():

@@ -365,30 +359,22 @@ def task_submit_shared(
     if terminate_loop and spiff_task.is_looping():
         spiff_task.terminate_loop()

-    human_task = HumanTaskModel.query.filter_by(
-        process_instance_id=process_instance_id, task_id=task_id, completed=False
-    ).first()
-    if human_task is None:
-        raise (
-            ApiError(
-                error_code="no_human_task",
-                message=(
-                    f"Cannot find a task to complete for task id '{task_id}' and"
-                    f" process instance {process_instance_id}."
-                ),
-                status_code=500,
-            )
-        )
-
-    processor.lock_process_instance("Web")
-    ProcessInstanceService.complete_form_task(
-        processor=processor,
-        spiff_task=spiff_task,
-        data=body,
-        user=g.user,
-        human_task=human_task,
+    human_task = _find_human_task_or_raise(
+        process_instance_id=process_instance_id,
+        task_id=task_id,
+        only_tasks_that_can_be_completed=True,
     )
-    processor.unlock_process_instance("Web")

+    with sentry_sdk.start_span(op="task", description="complete_form_task"):
+        processor.lock_process_instance("Web")
+        ProcessInstanceService.complete_form_task(
+            processor=processor,
+            spiff_task=spiff_task,
+            data=body,
+            user=g.user,
+            human_task=human_task,
+        )
+        processor.unlock_process_instance("Web")

     # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same
     # task spec, complete that form as well.

@@ -417,6 +403,19 @@ def task_submit_shared(
     return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")


+def task_submit(
+    process_instance_id: int,
+    task_id: str,
+    body: Dict[str, Any],
+    terminate_loop: bool = False,
+) -> flask.wrappers.Response:
+    """Task_submit_user_data."""
+    with sentry_sdk.start_span(
+        op="controller_action", description="tasks_controller.task_submit"
+    ):
+        return task_submit_shared(process_instance_id, task_id, body, terminate_loop)
+
+
 def _get_tasks(
     processes_started_by_user: bool = True,
     has_lane_assignment_id: bool = True,

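The wrapper pattern used for task_submit, generalized as a sketch (not part of the commit: any controller action can be run inside a named sentry span so it shows up in performance traces):

    import sentry_sdk

    def traced(op: str, description: str):
        # Decorator that wraps a function call in a sentry span.
        def decorator(func):
            def wrapper(*args, **kwargs):
                with sentry_sdk.start_span(op=op, description=description):
                    return func(*args, **kwargs)
            return wrapper
        return decorator
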
@@ -511,14 +510,29 @@ def _get_tasks(

 def _prepare_form_data(
     form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo
-) -> str:
+) -> dict:
     """Prepare_form_data."""
     if spiff_task.data is None:
-        return ""
+        return {}

     file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
     try:
-        return _render_jinja_template(file_contents, spiff_task)
+        form_contents = _render_jinja_template(file_contents, spiff_task)
+        try:
+            # form_contents is a str
+            hot_dict: dict = json.loads(form_contents)
+            return hot_dict
+        except Exception as exception:
+            raise (
+                ApiError(
+                    error_code="error_loading_form",
+                    message=(
+                        f"Could not load form schema from: {form_file}."
+                        f" Error was: {str(exception)}"
+                    ),
+                    status_code=400,
+                )
+            ) from exception
     except WorkflowTaskException as wfe:
         wfe.add_note(f"Error in Json Form File '{form_file}'")
         api_error = ApiError.from_workflow_exception(

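The render-then-parse flow that _prepare_form_data now owns, shown with jinja2 directly (illustrative template and data):

    import json
    import jinja2

    template = '{"title": "Approve {{ requester }}?", "type": "object"}'
    rendered = jinja2.Template(template).render(requester="fin")
    form_dict = json.loads(rendered)  # {'title': 'Approve fin?', 'type': 'object'}
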
@@ -546,9 +560,21 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
             template_error.lineno - 1
         ]
         wfe.add_note(
-            "Jinja2 template errors can happen when trying to displaying task data"
+            "Jinja2 template errors can happen when trying to display task data"
         )
         raise wfe from template_error
+    except Exception as error:
+        type, value, tb = exc_info()
+        wfe = WorkflowTaskException(str(error), task=spiff_task, exception=error)
+        while tb:
+            if tb.tb_frame.f_code.co_filename == "<template>":
+                wfe.line_number = tb.tb_lineno
+                wfe.error_line = unprocessed_template.split("\n")[tb.tb_lineno - 1]
+            tb = tb.tb_next
+        wfe.add_note(
+            "Jinja2 template errors can happen when trying to displaying task data"
+        )
+        raise wfe from error


 def _get_spiff_task_from_process_instance(

@@ -574,7 +600,9 @@ def _get_spiff_task_from_process_instance(


 # originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
-def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task) -> None:
+def _update_form_schema_with_task_data_as_needed(
+    in_dict: dict, task: Task, spiff_task: SpiffTask
+) -> None:
     """Update_nested."""
     if task.data is None:
         return None

@@ -601,7 +629,7 @@ def _update_form_schema_with_task_data_as_needed(
                             f" '{task_data_var}' but it doesn't exist in"
                             " the Task Data."
                         ),
-                        task=task,
+                        task=spiff_task,
                     )
                     raise (
                         ApiError.from_workflow_exception(

@@ -634,11 +662,11 @@ def _update_form_schema_with_task_data_as_needed(

             in_dict[k] = options_for_react_json_schema_form
     elif isinstance(value, dict):
-        _update_form_schema_with_task_data_as_needed(value, task)
+        _update_form_schema_with_task_data_as_needed(value, task, spiff_task)
     elif isinstance(value, list):
         for o in value:
             if isinstance(o, dict):
-                _update_form_schema_with_task_data_as_needed(o, task)
+                _update_form_schema_with_task_data_as_needed(o, task, spiff_task)


 def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:

@@ -654,3 +682,32 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
     ).label("potential_owner_usernames")

     return potential_owner_usernames_from_group_concat_or_similar


+def _find_human_task_or_raise(
+    process_instance_id: int,
+    task_id: str,
+    only_tasks_that_can_be_completed: bool = False,
+) -> HumanTaskModel:
+    if only_tasks_that_can_be_completed:
+        human_task_query = HumanTaskModel.query.filter_by(
+            process_instance_id=process_instance_id, task_id=task_id, completed=False
+        )
+    else:
+        human_task_query = HumanTaskModel.query.filter_by(
+            process_instance_id=process_instance_id, task_id=task_id
+        )
+
+    human_task: HumanTaskModel = human_task_query.first()
+    if human_task is None:
+        raise (
+            ApiError(
+                error_code="no_human_task",
+                message=(
+                    f"Cannot find a task to complete for task id '{task_id}' and"
+                    f" process instance {process_instance_id}."
+                ),
+                status_code=500,
+            )
+        )
+    return human_task

@@ -17,6 +17,7 @@ from flask import request
 from werkzeug.wrappers import Response

 from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authentication_service import AuthenticationService
 from spiffworkflow_backend.services.authentication_service import (

@@ -58,6 +59,10 @@ def verify_token(
     if not token and "Authorization" in request.headers:
         token = request.headers["Authorization"].removeprefix("Bearer ")

+    if not token and "access_token" in request.cookies:
+        if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/"):
+            token = request.cookies["access_token"]
+
     # This should never be set here but just in case
     _clear_auth_tokens_from_thread_local_data()

@@ -96,7 +101,7 @@ def verify_token(
             )
             if auth_token and "error" not in auth_token:
                 tld = current_app.config["THREAD_LOCAL_DATA"]
-                tld.new_access_token = auth_token["access_token"]
+                tld.new_access_token = auth_token["id_token"]
                 tld.new_id_token = auth_token["id_token"]
                 # We have the user, but this code is a bit convoluted, and will later demand
                 # a user_info object so it can look up the user. Sorry to leave this crap here.

@@ -186,6 +191,7 @@ def set_new_access_token_in_cookie(
     ):
         domain_for_frontend_cookie = None

+    # fixme - we should not be passing the access token back to the client
     if hasattr(tld, "new_access_token") and tld.new_access_token:
         response.set_cookie(
             "access_token", tld.new_access_token, domain=domain_for_frontend_cookie

@@ -254,7 +260,7 @@ def parse_id_token(token: str) -> Any:
     return json.loads(decoded)


-def login_return(code: str, state: str, session_state: str) -> Optional[Response]:
+def login_return(code: str, state: str, session_state: str = "") -> Optional[Response]:
     """Login_return."""
     state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
     state_redirect_url = state_dict["redirect_url"]

@@ -269,12 +275,13 @@ def login_return(code: str, state: str, session_state: str = "") -> Optional[Response
         user_model = AuthorizationService.create_user_from_sign_in(user_info)
         g.user = user_model.id
         g.token = auth_token_object["id_token"]
-        AuthenticationService.store_refresh_token(
-            user_model.id, auth_token_object["refresh_token"]
-        )
+        if "refresh_token" in auth_token_object:
+            AuthenticationService.store_refresh_token(
+                user_model.id, auth_token_object["refresh_token"]
+            )
         redirect_url = state_redirect_url
         tld = current_app.config["THREAD_LOCAL_DATA"]
-        tld.new_access_token = auth_token_object["access_token"]
+        tld.new_access_token = auth_token_object["id_token"]
         tld.new_id_token = auth_token_object["id_token"]
         return redirect(redirect_url)

@ -1,6 +1,7 @@
|
|||
"""Get_env."""
|
||||
"""Get current user."""
|
||||
from typing import Any
|
||||
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
|
||||
from spiffworkflow_backend.models.script_attributes_context import (
|
||||
|
@ -10,8 +11,6 @@ from spiffworkflow_backend.scripts.script import Script
|
|||
|
||||
|
||||
class GetCurrentUser(Script):
|
||||
"""GetCurrentUser."""
|
||||
|
||||
@staticmethod
|
||||
def requires_privileged_permissions() -> bool:
|
||||
"""We have deemed this function safe to run without elevated permissions."""
|
||||
|
@ -28,4 +27,7 @@ class GetCurrentUser(Script):
|
|||
**kwargs: Any
|
||||
) -> Any:
|
||||
"""Run."""
|
||||
return g.user.username
|
||||
# dump the user using our json encoder and then load it back up as a dict
|
||||
# to remove unwanted field types
|
||||
user_as_json_string = current_app.json.dumps(g.user)
|
||||
return current_app.json.loads(user_as_json_string)
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
"""Markdown_file_download_link."""
|
||||
from typing import Any
|
||||
from urllib.parse import unquote
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from spiffworkflow_backend.models.process_model import ProcessModelInfo
|
||||
from spiffworkflow_backend.models.script_attributes_context import (
|
||||
ScriptAttributesContext,
|
||||
)
|
||||
from spiffworkflow_backend.scripts.script import Script
|
||||
|
||||
|
||||
class GetMarkdownFileDownloadLink(Script):
|
||||
"""GetMarkdownFileDownloadLink."""
|
||||
|
||||
@staticmethod
|
||||
def requires_privileged_permissions() -> bool:
|
||||
"""We have deemed this function safe to run without elevated permissions."""
|
||||
return False
|
||||
|
||||
def get_description(self) -> str:
|
||||
"""Get_description."""
|
||||
return """Returns a string which is a string in markdown format."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*_args: Any,
|
||||
**kwargs: Any,
|
||||
) -> Any:
|
||||
"""Run."""
|
||||
# example input:
|
||||
# "data:application/pdf;name=Harmeet_1234.pdf;base64,JV...."
|
||||
process_data_identifier = kwargs["key"]
|
||||
parts = kwargs["file_data"].split(";")
|
||||
file_index = kwargs["file_index"]
|
||||
label = unquote(parts[1].split("=")[1])
|
||||
process_model_identifier = script_attributes_context.process_model_identifier
|
||||
modified_process_model_identifier = (
|
||||
ProcessModelInfo.modify_process_identifier_for_path_param(
|
||||
process_model_identifier
|
||||
)
|
||||
)
|
||||
process_instance_id = script_attributes_context.process_instance_id
|
||||
url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
|
||||
url += (
|
||||
f"/v1.0/process-data-file-download/{modified_process_model_identifier}/"
|
||||
+ f"{process_instance_id}/{process_data_identifier}?index={file_index}"
|
||||
)
|
||||
link = f"[{label}]({url})"
|
||||
|
||||
return link
|
|
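Given the example input from the comment in run(), the script yields a markdown link like this (model, instance, and key values are made up):

    file_data = "data:application/pdf;name=Harmeet_1234.pdf;base64,JV...."
    # label -> "Harmeet_1234.pdf" via unquote(parts[1].split("=")[1])
    # link  -> "[Harmeet_1234.pdf](<SPIFFWORKFLOW_BACKEND_URL>/v1.0/"
    #          "process-data-file-download/my:model/42/file_data_key?index=0)"
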
@@ -11,7 +11,6 @@ from flask import current_app
 from flask import redirect
 from werkzeug.wrappers import Response

-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel

@@ -20,7 +19,21 @@ class MissingAccessTokenError(Exception):
     """MissingAccessTokenError."""


+class NotAuthorizedError(Exception):
+    pass
+
+
+class RefreshTokenStorageError(Exception):
+    pass
+
+
+class UserNotLoggedInError(Exception):
+    pass
+
+
+# These could be either 'id' OR 'access' tokens and we can't always know which

 class TokenExpiredError(Exception):
     """TokenExpiredError."""

@@ -29,6 +42,10 @@ class TokenInvalidError(Exception):
     """TokenInvalidError."""


+class TokenNotProvidedError(Exception):
+    pass
+
+
 class AuthenticationProviderTypes(enum.Enum):
     """AuthenticationServiceProviders."""

@@ -183,9 +200,8 @@ class AuthenticationService:
             db.session.commit()
         except Exception as e:
             db.session.rollback()
-            raise ApiError(
-                error_code="store_refresh_token_error",
-                message=f"We could not store the refresh token. Original error is {e}",
+            raise RefreshTokenStorageError(
+                f"We could not store the refresh token. Original error is {e}",
             ) from e

     @staticmethod

@@ -21,7 +21,6 @@ from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from sqlalchemy import or_
 from sqlalchemy import text

-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.group import GroupModel

@@ -34,6 +33,11 @@ from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.models.user import UserNotFoundError
 from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
 from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint
+from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
+from spiffworkflow_backend.services.authentication_service import TokenExpiredError
+from spiffworkflow_backend.services.authentication_service import TokenInvalidError
+from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
+from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
 from spiffworkflow_backend.services.group_service import GroupService
 from spiffworkflow_backend.services.user_service import UserService

@@ -98,20 +102,16 @@ class AuthorizationService:
     def verify_sha256_token(cls, auth_header: Optional[str]) -> None:
         """Verify_sha256_token."""
         if auth_header is None:
-            raise ApiError(
-                error_code="unauthorized",
-                message="",
-                status_code=403,
+            raise TokenNotProvidedError(
+                "unauthorized",
             )

         received_sign = auth_header.split("sha256=")[-1].strip()
         secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode()
         expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest()
         if not compare_digest(received_sign, expected_sign):
-            raise ApiError(
-                error_code="unauthorized",
-                message="",
-                status_code=403,
+            raise TokenInvalidError(
+                "unauthorized",
             )

     @classmethod

@@ -393,10 +393,8 @@ class AuthorizationService:
         authorization_exclusion_list = ["permissions_check"]

         if not hasattr(g, "user"):
-            raise ApiError(
-                error_code="user_not_logged_in",
-                message="User is not logged in. Please log in",
-                status_code=401,
+            raise UserNotLoggedInError(
+                "User is not logged in. Please log in",
             )

         api_view_function = current_app.view_functions[request.endpoint]

@ -416,13 +414,11 @@ class AuthorizationService:
|
|||
if has_permission:
|
||||
return None
|
||||
|
||||
raise ApiError(
|
||||
error_code="unauthorized",
|
||||
message=(
|
||||
raise NotAuthorizedError(
|
||||
(
|
||||
f"User {g.user.username} is not authorized to perform requested action:"
|
||||
f" {permission_string} - {request.path}"
|
||||
),
|
||||
status_code=403,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
|
@ -440,13 +436,11 @@ class AuthorizationService:
|
|||
payload = jwt.decode(auth_token, options={"verify_signature": False})
|
||||
return payload
|
||||
except jwt.ExpiredSignatureError as exception:
|
||||
raise ApiError(
|
||||
"token_expired",
|
||||
raise TokenExpiredError(
|
||||
"The Authentication token you provided expired and must be renewed.",
|
||||
) from exception
|
||||
except jwt.InvalidTokenError as exception:
|
||||
raise ApiError(
|
||||
"token_invalid",
|
||||
raise TokenInvalidError(
|
||||
(
|
||||
"The Authentication token you provided is invalid. You need a new"
|
||||
" token. "
|
||||
|
@ -463,6 +457,7 @@ class AuthorizationService:
|
|||
human_task = HumanTaskModel.query.filter_by(
|
||||
task_name=spiff_task.task_spec.name,
|
||||
process_instance_id=process_instance_id,
|
||||
completed=False,
|
||||
).first()
|
||||
if human_task is None:
|
||||
raise HumanTaskNotFoundError(
|
||||
|
@@ -490,38 +485,42 @@ class AuthorizationService:
            .filter(UserModel.service_id == user_info["sub"])
            .first()
        )
        email = display_name = username = ""
        user_attributes = {}

        if "email" in user_info:
            username = user_info["email"]
            email = user_info["email"]
            user_attributes["username"] = user_info["email"]
            user_attributes["email"] = user_info["email"]
        else:  # we fall back to the sub, which may be very ugly.
            username = user_info["sub"] + "@" + user_info["iss"]
            fallback_username = user_info["sub"] + "@" + user_info["iss"]
            user_attributes["username"] = fallback_username

        if "preferred_username" in user_info:
            display_name = user_info["preferred_username"]
            user_attributes["display_name"] = user_info["preferred_username"]
        elif "nickname" in user_info:
            display_name = user_info["nickname"]
            user_attributes["display_name"] = user_info["nickname"]
        elif "name" in user_info:
            display_name = user_info["name"]
            user_attributes["display_name"] = user_info["name"]

        user_attributes["service"] = user_info["iss"]
        user_attributes["service_id"] = user_info["sub"]

        for field_index, tenant_specific_field in enumerate(
            current_app.config["TENANT_SPECIFIC_FIELDS"]
        ):
            if tenant_specific_field in user_info:
                field_number = field_index + 1
                user_attributes[f"tenant_specific_field_{field_number}"] = user_info[
                    tenant_specific_field
                ]

        if user_model is None:
            current_app.logger.debug("create_user in login_return")
            is_new_user = True
            user_model = UserService().create_user(
                username=username,
                service=user_info["iss"],
                service_id=user_info["sub"],
                email=email,
                display_name=display_name,
            )
            user_model = UserService().create_user(**user_attributes)
        else:
            # Update with the latest information
            user_model.username = username
            user_model.email = email
            user_model.display_name = display_name
            user_model.service = user_info["iss"]
            user_model.service_id = user_info["sub"]
            for key, value in user_attributes.items():
                setattr(user_model, key, value)

        # this may eventually get too slow.
        # when it does, be careful about backgrounding, because
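To make the refactor above concrete, here is what user_attributes might hold
for a hypothetical OIDC payload, assuming TENANT_SPECIFIC_FIELDS is
configured as ["company"]; all values are invented for illustration:

user_info = {
    "iss": "https://openid.example.com",
    "sub": "abc-123",
    "email": "ciara@example.com",
    "preferred_username": "ciara",
    "company": "ACME",
}

# After the mapping above runs:
user_attributes = {
    "username": "ciara@example.com",
    "email": "ciara@example.com",
    "display_name": "ciara",
    "service": "https://openid.example.com",
    "service_id": "abc-123",
    "tenant_specific_field_1": "ACME",
}
# UserService().create_user(**user_attributes) then creates the row, or each
# key is written back onto the existing user_model with setattr.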
@@ -551,7 +550,9 @@ class AuthorizationService:

        permissions_to_assign: list[PermissionToAssign] = []

        # we were thinking that if you can start an instance, you ought to be able to view your own instances.
        # we were thinking that if you can start an instance, you ought to be able to:
        # 1. view your own instances.
        # 2. view the logs for these instances.
        if permission_set == "start":
            target_uri = f"/process-instances/{process_related_path_segment}"
            permissions_to_assign.append(

@@ -561,6 +562,10 @@ class AuthorizationService:
            permissions_to_assign.append(
                PermissionToAssign(permission="read", target_uri=target_uri)
            )
            target_uri = f"/logs/{process_related_path_segment}"
            permissions_to_assign.append(
                PermissionToAssign(permission="read", target_uri=target_uri)
            )

        else:
            if permission_set == "all":
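Based on the test expectations later in this diff, exploding a "start"
permission on a process model now yields, among others, read access to the
matching logs and for-me instance URIs:

# permission_set="start" for "some-process-group:some-process-model"
# now explodes into tuples including:
expected_read_permissions = [
    ("/logs/some-process-group:some-process-model/*", "read"),
    ("/process-instances/for-me/some-process-group:some-process-model/*", "read"),
]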
@@ -240,5 +240,8 @@ class DBHandler(logging.Handler):
                "spiff_step": spiff_step,
            }
        )
        if len(self.logs) % 1 == 0:
        # so at some point we are going to insert logs.
        # we don't want to insert on every log, so we will insert every 100 logs, which is just about as fast as inserting
        # on every 1,000 logs. if we get deadlocks in the database, this can be changed to 1 in order to insert on every log.
        if len(self.logs) >= 100:
            self.bulk_insert_logs()
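The change above moves from flushing on every record (len(self.logs) % 1 == 0
is always true) to flushing once per 100 buffered records. A stripped-down
sketch of the batching idea, with a stand-in bulk_insert callable:

import logging

class BatchingHandler(logging.Handler):
    def __init__(self, bulk_insert, batch_size: int = 100):
        super().__init__()
        self.bulk_insert = bulk_insert  # e.g. issues one multi-row INSERT
        self.batch_size = batch_size
        self.buffer: list[logging.LogRecord] = []

    def emit(self, record: logging.LogRecord) -> None:
        self.buffer.append(record)
        # One database round trip per batch is far cheaper than one per record.
        if len(self.buffer) >= self.batch_size:
            self.bulk_insert(self.buffer)
            self.buffer = []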
@@ -26,8 +26,13 @@ from lxml import etree # type: ignore
from lxml.etree import XMLSyntaxError  # type: ignore
from RestrictedPython import safe_globals  # type: ignore
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import Box  # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine  # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment  # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment
from SpiffWorkflow.bpmn.serializer.task_spec import (  # type: ignore
    EventBasedGatewayConverter,
)
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec  # type: ignore
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent  # type: ignore

@@ -36,36 +41,12 @@ from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent  # type: ignor
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask  # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter  # type: ignore
from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter  # type: ignore
from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter  # type: ignore
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    CallActivityTaskConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    EventBasedGatewayConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    IntermediateCatchEventConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    IntermediateThrowEventConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import ManualTaskConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import ReceiveTaskConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import ScriptTaskConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import SendTaskConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import ServiceTaskConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import StartEventConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import SubWorkflowTaskConverter
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
    TransactionSubprocessConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import UserTaskConverter
from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ignore
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore

@@ -108,6 +89,8 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDeleg
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.user_service import UserService

SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter)


# Sorry about all this crap. I wanted to move this thing to another file, but
# importing a bunch of types causes circular imports.
@@ -150,6 +133,137 @@ class ProcessInstanceLockedBySomethingElseError(Exception):
    pass


class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment):  # type: ignore
    def __init__(self, environment_globals: Dict[str, Any]):
        """BoxedTaskDataBasedScriptEngineEnvironment."""
        self._last_result: Dict[str, Any] = {}
        super().__init__(environment_globals)

    def execute(
        self,
        script: str,
        context: Dict[str, Any],
        external_methods: Optional[Dict[str, Any]] = None,
    ) -> None:
        super().execute(script, context, external_methods)
        self._last_result = context

    def last_result(self) -> Dict[str, Any]:
        return {k: v for k, v in self._last_result.items()}

    def clear_state(self) -> None:
        pass

    def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
        pass

    def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
        pass

    def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None:
        pass

    def revise_state_with_task_data(self, task: SpiffTask) -> None:
        pass


class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment):  # type: ignore
    PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state"

    def __init__(self, environment_globals: Dict[str, Any]):
        """NonTaskDataBasedScriptEngineEnvironment."""
        self.state: Dict[str, Any] = {}
        self.non_user_defined_keys = set(
            [*environment_globals.keys()] + ["__builtins__", "current_user"]
        )
        super().__init__(environment_globals)

    def evaluate(
        self,
        expression: str,
        context: Dict[str, Any],
        external_methods: Optional[dict[str, Any]] = None,
    ) -> Any:
        # TODO: once integrated look at the tests that fail without Box
        Box.convert_to_box(context)
        state = {}
        state.update(self.globals)
        state.update(external_methods or {})
        state.update(self.state)
        state.update(context)
        return eval(expression, state)  # noqa

    def execute(
        self,
        script: str,
        context: Dict[str, Any],
        external_methods: Optional[Dict[str, Any]] = None,
    ) -> None:
        # TODO: once integrated look at the tests that fail without Box
        Box.convert_to_box(context)
        self.state.update(self.globals)
        self.state.update(external_methods or {})
        self.state.update(context)
        exec(script, self.state)  # noqa

        self.state = self._user_defined_state(external_methods)

        # the task data needs to be updated with the current state so data references can be resolved properly.
        # the state will be removed later once the task is completed.
        context.update(self.state)

    def _user_defined_state(
        self, external_methods: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        keys_to_filter = self.non_user_defined_keys
        if external_methods is not None:
            keys_to_filter |= set(external_methods.keys())

        return {
            k: v
            for k, v in self.state.items()
            if k not in keys_to_filter and not callable(v)
        }

    def last_result(self) -> Dict[str, Any]:
        return {k: v for k, v in self.state.items()}

    def clear_state(self) -> None:
        self.state = {}

    def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
        key = self.PYTHON_ENVIRONMENT_STATE_KEY
        state = self._user_defined_state()
        bpmn_process_instance.data[key] = state

    def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
        key = self.PYTHON_ENVIRONMENT_STATE_KEY
        self.state = bpmn_process_instance.data.get(key, {})

    def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None:
        bpmn_process_instance.data.update(self._user_defined_state())

    def revise_state_with_task_data(self, task: SpiffTask) -> None:
        state_keys = set(self.state.keys())
        task_data_keys = set(task.data.keys())
        state_keys_to_remove = state_keys - task_data_keys
        task_data_keys_to_keep = task_data_keys - state_keys

        self.state = {
            k: v for k, v in self.state.items() if k not in state_keys_to_remove
        }
        task.data = {k: v for k, v in task.data.items() if k in task_data_keys_to_keep}

        if hasattr(task.task_spec, "_result_variable"):
            result_variable = task.task_spec._result_variable(task)
            if result_variable in task.data:
                self.state[result_variable] = task.data.pop(result_variable)


class CustomScriptEngineEnvironment(NonTaskDataBasedScriptEngineEnvironment):
    pass


class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
    """This is a custom script processor that can be easily injected into Spiff Workflow.
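A quick usage sketch of the environment introduced above (assuming the class
as defined in this diff; values invented), showing that engine globals and
builtins are filtered out of what counts as user-defined state:

env = NonTaskDataBasedScriptEngineEnvironment({"len": len})
context: dict = {}
env.execute("x = 1\ny = len('ab')", context)
print(env.last_result())  # {'x': 1, 'y': 2} -- 'len' and '__builtins__' filtered out
print(context)            # the task data also carries {'x': 1, 'y': 2} for now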
@@ -179,7 +293,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
        default_globals.update(safe_globals)
        default_globals["__builtins__"]["__import__"] = _import

        super().__init__(default_globals=default_globals)
        environment = CustomScriptEngineEnvironment(default_globals)

        super().__init__(environment=environment)

    def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]:
        """__get_augment_methods."""

@@ -278,29 +394,12 @@ class ProcessInstanceProcessor:

    _script_engine = CustomBpmnScriptEngine()
    SERIALIZER_VERSION = "1.0-spiffworkflow-backend"

    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
        [
            BoundaryEventConverter,
            BusinessRuleTaskConverter,
            CallActivityTaskConverter,
            EndEventConverter,
            IntermediateCatchEventConverter,
            IntermediateThrowEventConverter,
            EventBasedGatewayConverter,
            ManualTaskConverter,
            NoneTaskConverter,
            ReceiveTaskConverter,
            ScriptTaskConverter,
            SendTaskConverter,
            ServiceTaskConverter,
            StartEventConverter,
            SubWorkflowTaskConverter,
            TransactionSubprocessConverter,
            UserTaskConverter,
        ]
        SPIFF_SPEC_CONFIG
    )
    _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
    _event_serializer = EventBasedGatewayConverter()
    _event_serializer = EventBasedGatewayConverter(wf_spec_converter)

    PROCESS_INSTANCE_ID_KEY = "process_instance_id"
    VALIDATION_PROCESS_KEY = "validate_only"
@@ -392,7 +491,7 @@ class ProcessInstanceProcessor:
                validate_only,
                subprocesses=subprocesses,
            )
            self.bpmn_process_instance.script_engine = self._script_engine
            self.set_script_engine(self.bpmn_process_instance)
            self.add_user_info_to_process_instance(self.bpmn_process_instance)

        except MissingSpecError as ke:

@@ -438,6 +537,18 @@ class ProcessInstanceProcessor:
            bpmn_process_spec, subprocesses
        )

    @staticmethod
    def set_script_engine(bpmn_process_instance: BpmnWorkflow) -> None:
        ProcessInstanceProcessor._script_engine.environment.restore_state(
            bpmn_process_instance
        )
        bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine

    def preserve_script_engine_state(self) -> None:
        ProcessInstanceProcessor._script_engine.environment.preserve_state(
            self.bpmn_process_instance
        )

    def current_user(self) -> Any:
        """Current_user."""
        current_user = None
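Taken together with the serialize() change later in this diff, this gives
user-defined script state a round trip through the persisted workflow JSON.
Roughly, with method names from this diff and `processor` /
`bpmn_process_instance` standing in for objects in scope:

# serialize() calls preserve_script_engine_state(), stashing user-defined
# script state into the workflow data under "spiff__python_env_state"
# before the JSON is produced:
json_blob = processor.serialize()

# When the instance is rehydrated -- possibly in another worker --
# set_script_engine() pulls that stashed state back into the shared engine
# before any script runs:
ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)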
@@ -470,11 +581,12 @@ class ProcessInstanceProcessor:
        subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
    ) -> BpmnWorkflow:
        """Get_bpmn_process_instance_from_workflow_spec."""
        return BpmnWorkflow(
        bpmn_process_instance = BpmnWorkflow(
            spec,
            script_engine=ProcessInstanceProcessor._script_engine,
            subprocess_specs=subprocesses,
        )
        ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
        return bpmn_process_instance

    @staticmethod
    def __get_bpmn_process_instance(
@@ -497,13 +609,11 @@ class ProcessInstanceProcessor:
                    )
                )
            except Exception as err:
                raise (err)
                raise err
            finally:
                spiff_logger.setLevel(original_spiff_logger_log_level)

            bpmn_process_instance.script_engine = (
                ProcessInstanceProcessor._script_engine
            )
            ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
        else:
            bpmn_process_instance = (
                ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(

@@ -528,7 +638,7 @@ class ProcessInstanceProcessor:
    ) -> None:
        """Raise_if_no_potential_owners."""
        if not potential_owner_ids:
            raise (NoPotentialOwnersForTaskError(message))
            raise NoPotentialOwnersForTaskError(message)

    def get_potential_owner_ids_from_task(
        self, task: SpiffTask
@@ -580,9 +690,13 @@ class ProcessInstanceProcessor:

    def spiff_step_details_mapping(self) -> dict:
        """SaveSpiffStepDetails."""
        bpmn_json = self.serialize()
        wf_json = json.loads(bpmn_json)
        task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]}
        # bpmn_json = self.serialize()
        # wf_json = json.loads(bpmn_json)
        task_json: Dict[str, Any] = {
            # "tasks": wf_json["tasks"],
            # "subprocesses": wf_json["subprocesses"],
            # "python_env": self._script_engine.environment.last_result(),
        }

        return {
            "process_instance_id": self.process_instance_model.id,

@@ -595,13 +709,7 @@ class ProcessInstanceProcessor:
    def spiff_step_details(self) -> SpiffStepDetailsModel:
        """SaveSpiffStepDetails."""
        details_mapping = self.spiff_step_details_mapping()
        details_model = SpiffStepDetailsModel(
            process_instance_id=details_mapping["process_instance_id"],
            spiff_step=details_mapping["spiff_step"],
            task_json=details_mapping["task_json"],
            timestamp=details_mapping["timestamp"],
            # completed_by_user_id=details_mapping["completed_by_user_id"],
        )
        details_model = SpiffStepDetailsModel(**details_mapping)
        return details_model

    def extract_metadata(self, process_model_info: ProcessModelInfo) -> None:
@@ -867,7 +975,7 @@ class ProcessInstanceProcessor:
    def send_bpmn_event(self, event_data: dict[str, Any]) -> None:
        """Send an event to the workflow."""
        payload = event_data.pop("payload", None)
        event_definition = self._event_serializer.restore(event_data)
        event_definition = self._event_serializer.registry.restore(event_data)
        if payload is not None:
            event_definition.payload = payload
        current_app.logger.info(
@@ -1384,25 +1492,51 @@ class ProcessInstanceProcessor:
    def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
        """Do_engine_steps."""
        step_details = []

        tasks_to_log = {
            "BPMN Task",
            "Script Task",
            "Service Task"
            # "End Event",
            # "Default Start Event",
            # "Exclusive Gateway",
            # "End Join",
            # "End Event",
            # "Default Throwing Event",
            # "Subprocess"
        }

        def should_log(task: SpiffTask) -> bool:
            if (
                task.task_spec.spec_type in tasks_to_log
                and not task.task_spec.name.endswith(".EndJoin")
            ):
                return True
            return False

        def will_complete_task(task: SpiffTask) -> None:
            if should_log(task):
                self.increment_spiff_step()

        def did_complete_task(task: SpiffTask) -> None:
            if should_log(task):
                self._script_engine.environment.revise_state_with_task_data(task)
                step_details.append(self.spiff_step_details_mapping())

        try:
            self.bpmn_process_instance.refresh_waiting_tasks(
                #
                # commenting out to see if this helps with the growing spiff steps/db issue
                #
                # will_refresh_task=lambda t: self.increment_spiff_step(),
                # did_refresh_task=lambda t: step_details.append(
                #     self.spiff_step_details_mapping()
                # ),
            )
            self.bpmn_process_instance.refresh_waiting_tasks()

            self.bpmn_process_instance.do_engine_steps(
                exit_at=exit_at,
                will_complete_task=lambda t: self.increment_spiff_step(),
                did_complete_task=lambda t: step_details.append(
                    self.spiff_step_details_mapping()
                ),
                will_complete_task=will_complete_task,
                did_complete_task=did_complete_task,
            )

            if self.bpmn_process_instance.is_completed():
                self._script_engine.environment.finalize_result(
                    self.bpmn_process_instance
                )

            self.process_bpmn_messages()
            self.queue_waiting_receive_messages()
@@ -1412,9 +1546,8 @@ class ProcessInstanceProcessor:
                if hasattr(handler, "bulk_insert_logs"):
                    handler.bulk_insert_logs()  # type: ignore
            db.session.commit()

        except WorkflowTaskException as we:
            raise ApiError.from_workflow_exception("task_error", str(we), we) from we
        except SpiffWorkflowException as swe:
            raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe

        finally:
            if save:

@@ -1466,6 +1599,7 @@ class ProcessInstanceProcessor:
    def serialize(self) -> str:
        """Serialize."""
        self.check_task_data_size()
        self.preserve_script_engine_state()
        return self._serializer.serialize_json(self.bpmn_process_instance)  # type: ignore

    def next_user_tasks(self) -> list[SpiffTask]:
@@ -4,6 +4,7 @@ from typing import Any
from typing import List
from typing import Optional

import sentry_sdk
from flask import current_app
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

@@ -234,8 +235,9 @@ class ProcessInstanceService:
        # ProcessInstanceService.post_process_form(spiff_task)  # some properties may update the data store.
        processor.complete_task(spiff_task, human_task, user=user)

        # maybe move this out once we have the interstitial page since this is here just so we can get the next human task
        processor.do_engine_steps(save=True)
        with sentry_sdk.start_span(op="task", description="backend_do_engine_steps"):
            # maybe move this out once we have the interstitial page since this is here just so we can get the next human task
            processor.do_engine_steps(save=True)

    @staticmethod
    def extract_form_data(latest_data: dict, task: SpiffTask) -> dict:
@@ -499,7 +499,10 @@ class ProcessModelService(FileSystemService):
        if name is None:
            raise ApiError(
                error_code="missing_name_of_process_model",
                message="Missing name of process model. It should be given",
                message=(
                    "Missing name of process model. Path not found:"
                    f" {json_file_path}"
                ),
            )

        process_model_info = ProcessModelInfo(
@@ -45,6 +45,7 @@ class ScriptUnitTestRunner:
        context = input_context.copy()

        try:
            cls._script_engine.environment.clear_state()
            cls._script_engine._execute(context=context, script=script)
        except SyntaxError as ex:
            return ScriptUnitTestResult(

@@ -77,6 +78,7 @@ class ScriptUnitTestRunner:
                error=f"Failed to execute script: {error_message}",
            )

        context = cls._script_engine.environment.last_result()
        result_as_boolean = context == expected_output_context

        script_unit_test_result = ScriptUnitTestResult(
@@ -3,6 +3,7 @@ import json
from typing import Any

import requests
import sentry_sdk
from flask import current_app
from flask import g

@@ -45,27 +46,27 @@ class ServiceTaskDelegate:
    @staticmethod
    def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str:
        """Calls a connector via the configured proxy."""
        params = {
            k: ServiceTaskDelegate.check_prefixes(v["value"])
            for k, v in bpmn_params.items()
        }
        params["spiff__task_data"] = task_data
        call_url = f"{connector_proxy_url()}/v1/do/{name}"
        with sentry_sdk.start_span(op="call-connector", description=call_url):
            params = {
                k: ServiceTaskDelegate.check_prefixes(v["value"])
                for k, v in bpmn_params.items()
            }
            params["spiff__task_data"] = task_data

        proxied_response = requests.post(
            f"{connector_proxy_url()}/v1/do/{name}", json=params
        )
            proxied_response = requests.post(call_url, json=params)

        parsed_response = json.loads(proxied_response.text)
            parsed_response = json.loads(proxied_response.text)

        if "refreshed_token_set" not in parsed_response:
            return proxied_response.text
            if "refreshed_token_set" not in parsed_response:
                return proxied_response.text

        secret_key = parsed_response["auth"]
        refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"])
        user_id = g.user.id if UserService.has_user() else None
        SecretService().update_secret(secret_key, refreshed_token_set, user_id)
            secret_key = parsed_response["auth"]
            refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"])
            user_id = g.user.id if UserService.has_user() else None
            SecretService().update_secret(secret_key, refreshed_token_set, user_id)

        return json.dumps(parsed_response["api_response"])
            return json.dumps(parsed_response["api_response"])


class ServiceTaskService:
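The span wrapper above follows a common tracing pattern: wrap the whole
outbound call so slow connector-proxy round trips show up in performance
traces. A generic sketch (hypothetical helper, not from this diff) of the
same pattern with sentry-sdk:

import requests
import sentry_sdk

def post_with_span(url: str, payload: dict) -> str:
    # Each outbound call becomes its own span in the current transaction.
    with sentry_sdk.start_span(op="http.client", description=url) as span:
        response = requests.post(url, json=payload, timeout=30)
        span.set_data("status_code", response.status_code)
        return response.text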
@@ -29,6 +29,9 @@ class UserService:
        service_id: str,
        email: Optional[str] = "",
        display_name: Optional[str] = "",
        tenant_specific_field_1: Optional[str] = None,
        tenant_specific_field_2: Optional[str] = None,
        tenant_specific_field_3: Optional[str] = None,
    ) -> UserModel:
        """Create_user."""
        user_model: Optional[UserModel] = (

@@ -46,6 +49,9 @@ class UserService:
            service_id=service_id,
            email=email,
            display_name=display_name,
            tenant_specific_field_1=tenant_specific_field_1,
            tenant_specific_field_2=tenant_specific_field_2,
            tenant_specific_field_3=tenant_specific_field_3,
        )
        db.session.add(user_model)
@@ -13,6 +13,18 @@
    "selectedColor": {
      "$ref": "#/definitions/Color",
      "title": "Select color"
    },
    "veryImportantFieldButOnlySometimes": {
      "title": "Very important field",
      "type": "string"
    },
    "building": {
      "properties": {
        "floor": {
          "title": "Floor",
          "type": "number"
        }
      }
    }
  }
}
@@ -13,7 +13,9 @@
    <bpmn:scriptTask id="Activity_1qtnye8" name="set color options" scriptFormat="python">
      <bpmn:incoming>Flow_1my9ag5</bpmn:incoming>
      <bpmn:outgoing>Flow_0b04rbg</bpmn:outgoing>
      <bpmn:script>awesome_color_options = [{"value": "blue", "label": "Blue"}, {"value": "green", "label": "Green"}]</bpmn:script>
      <bpmn:script>awesome_color_options = [{"value": "blue", "label": "Blue"}, {"value": "green", "label": "Green"}]
form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes", "building.floor"]
      </bpmn:script>
    </bpmn:scriptTask>
    <bpmn:userTask id="Activity_1gqykqt" name="ask user for color">
      <bpmn:extensionElements>
@@ -82,7 +82,8 @@
      <bpmn:incoming>Flow_0bgkfue</bpmn:incoming>
      <bpmn:outgoing>Flow_1ivhu7x</bpmn:outgoing>
      <bpmn:script>approver = get_current_user()
lane_owners["Finance Team"].remove(approver)</bpmn:script>
username = approver['username']
lane_owners["Finance Team"].remove(username)</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
@@ -1687,6 +1687,14 @@ class TestProcessApi(BaseTest):
            == "Green"
        )

        # if you set this in task data:
        # form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes", "building.floor"]
        # you will get this ui schema:
        assert response.json["form_ui_schema"] == {
            "building": {"floor": {"ui:widget": "hidden"}},
            "veryImportantFieldButOnlySometimes": {"ui:widget": "hidden"},
        }

    def test_process_instance_list_with_default_list(
        self,
        app: Flask,
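The assertion above implies a transformation from dotted hidden-field paths
to a nested react-jsonschema-form ui schema. A sketch of that mapping as a
standalone helper; the name and placement are assumptions, the real logic
lives in the backend:

def hidden_fields_to_ui_schema(hidden_fields: list[str]) -> dict:
    ui_schema: dict = {}
    for path in hidden_fields:
        node = ui_schema
        parts = path.split(".")
        for part in parts[:-1]:
            node = node.setdefault(part, {})  # descend, creating nesting
        node[parts[-1]] = {"ui:widget": "hidden"}
    return ui_schema

print(hidden_fields_to_ui_schema(
    ["veryImportantFieldButOnlySometimes", "building.floor"]
))
# {'veryImportantFieldButOnlySometimes': {'ui:widget': 'hidden'},
#  'building': {'floor': {'ui:widget': 'hidden'}}}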
@@ -41,6 +41,11 @@ class TestGetAllPermissions(BaseTest):
        )

        expected_permissions = [
            {
                "group_identifier": "my_test_group",
                "uri": "/logs/hey:group:*",
                "permissions": ["read"],
            },
            {
                "group_identifier": "my_test_group",
                "uri": "/process-instances/hey:group:*",
@@ -0,0 +1,46 @@
"""Test_get_current_user."""
import json

from flask import g
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.scripts.get_current_user import GetCurrentUser


class TestGetCurrentUser(BaseTest):
    def test_get_current_user(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_get_current_user."""
        testuser1 = self.find_or_create_user("testuser1")
        testuser1.tenant_specific_field_1 = "456"
        db.session.add(testuser1)
        db.session.commit()

        testuser1 = self.find_or_create_user("testuser1")
        g.user = testuser1
        process_model_identifier = "test_process_model"
        process_instance_id = 1
        script_attributes_context = ScriptAttributesContext(
            task=None,
            environment_identifier="testing",
            process_instance_id=process_instance_id,
            process_model_identifier=process_model_identifier,
        )
        result = GetCurrentUser().run(
            script_attributes_context,
        )
        assert result["username"] == "testuser1"
        assert result["tenant_specific_field_1"] == "456"
        json.dumps(result)
@@ -87,7 +87,8 @@ class TestGetLocaltime(BaseTest):
            )

            assert spiff_task
            data = spiff_task.data

            data = ProcessInstanceProcessor._script_engine.environment.last_result()
            some_time = data["some_time"]
            localtime = data["localtime"]
            timezone = data["timezone"]
@@ -197,6 +197,10 @@ class TestAuthorizationService(BaseTest):
    ) -> None:
        """Test_explode_permissions_start_on_process_group."""
        expected_permissions = [
            (
                "/logs/some-process-group:some-process-model:*",
                "read",
            ),
            (
                "/process-instances/for-me/some-process-group:some-process-model:*",
                "read",

@@ -255,6 +259,10 @@ class TestAuthorizationService(BaseTest):
    ) -> None:
        """Test_explode_permissions_start_on_process_model."""
        expected_permissions = [
            (
                "/logs/some-process-group:some-process-model/*",
                "read",
            ),
            (
                "/process-instances/for-me/some-process-group:some-process-model/*",
                "read",
@@ -0,0 +1 @@
/node_modules

@@ -12,6 +12,12 @@ FROM base AS setup

COPY . /app/

RUN cp /app/package.json /app/package.json.bak
ADD justservewebserver.package.json /app/package.json
RUN npm ci --ignore-scripts
RUN cp -r /app/node_modules /app/node_modules.justserve
RUN cp /app/package.json.bak /app/package.json

# npm ci because it respects the lock file.
# --ignore-scripts because authors can do bad things in postinstall scripts.
# https://cheatsheetseries.owasp.org/cheatsheets/NPM_Security_Cheat_Sheet.html

@@ -31,5 +37,6 @@ ENV PORT0=7001

COPY --from=setup /app/build /app/build
COPY --from=setup /app/bin /app/bin
COPY --from=setup /app/node_modules.justserve /app/node_modules

ENTRYPOINT ["/app/bin/boot_server_in_docker"]
@@ -0,0 +1,36 @@
{
  "name": "spiffworkflow-frontend",
  "version": "0.1.0",
  "private": true,
  "dependencies": {
    "serve": "^14.0.0"
  },
  "scripts": {
    "start": "ESLINT_NO_DEV_ERRORS=true PORT=7001 craco start",
    "build": "craco build",
    "test": "react-scripts test --coverage",
    "t": "npm test -- --watchAll=false",
    "eject": "craco eject",
    "format": "prettier --write src/**/*.[tj]s{,x}",
    "lint": "./node_modules/.bin/eslint src",
    "lint:fix": "./node_modules/.bin/eslint --fix src"
  },
  "eslintConfig": {
    "extends": [
      "react-app",
      "react-app/jest"
    ]
  },
  "browserslist": {
    "production": [
      ">0.2%",
      "not dead",
      "not op_mini all"
    ],
    "development": [
      "last 1 chrome version",
      "last 1 firefox version",
      "last 1 safari version"
    ]
  }
}
@@ -32,7 +32,7 @@
        "@types/node": "^18.6.5",
        "@types/react": "^18.0.17",
        "@types/react-dom": "^18.0.6",
        "@uiw/react-md-editor": "^3.19.5",
        "@uiw/react-md-editor": "^3.20.2",
        "autoprefixer": "10.4.8",
        "axios": "^0.27.2",
        "bootstrap": "^5.2.0",

@@ -58,11 +58,9 @@
        "react-dom": "^18.2.0",
        "react-icons": "^4.4.0",
        "react-jsonschema-form": "^1.8.1",
        "react-markdown": "^8.0.3",
        "react-router": "^6.3.0",
        "react-router-dom": "^6.3.0",
        "react-scripts": "^5.0.1",
        "remark-gfm": "^3.0.1",
        "serve": "^14.0.0",
        "timepicker": "^1.13.18",
        "typescript": "^4.7.4",

@@ -6473,9 +6471,9 @@
      }
    },
    "node_modules/@uiw/react-md-editor": {
      "version": "3.19.5",
      "resolved": "https://registry.npmjs.org/@uiw/react-md-editor/-/react-md-editor-3.19.5.tgz",
      "integrity": "sha512-uhFGLOrKEtADM8QUauTdG5x8wqNS15ry2jCYMBr/E1Or3njvg7jpB0KRv+QTgZDnglevCTjuQxZPH7I7hG2uKw==",
      "version": "3.20.2",
      "resolved": "https://registry.npmjs.org/@uiw/react-md-editor/-/react-md-editor-3.20.2.tgz",
      "integrity": "sha512-L3sp3dsbpTOcVX+mzkmwwDl2Jl/UEgrySTun4XCMck1QF1WX53z0sHbN6XET+veHOML9Tw8TUUECR7IqajYjDw==",
      "dependencies": {
        "@babel/runtime": "^7.14.6",
        "@uiw/react-markdown-preview": "^4.1.5",

@@ -36996,9 +36994,9 @@
      }
    },
    "@uiw/react-md-editor": {
      "version": "3.19.5",
      "resolved": "https://registry.npmjs.org/@uiw/react-md-editor/-/react-md-editor-3.19.5.tgz",
      "integrity": "sha512-uhFGLOrKEtADM8QUauTdG5x8wqNS15ry2jCYMBr/E1Or3njvg7jpB0KRv+QTgZDnglevCTjuQxZPH7I7hG2uKw==",
      "version": "3.20.2",
      "resolved": "https://registry.npmjs.org/@uiw/react-md-editor/-/react-md-editor-3.20.2.tgz",
      "integrity": "sha512-L3sp3dsbpTOcVX+mzkmwwDl2Jl/UEgrySTun4XCMck1QF1WX53z0sHbN6XET+veHOML9Tw8TUUECR7IqajYjDw==",
      "requires": {
        "@babel/runtime": "^7.14.6",
        "@uiw/react-markdown-preview": "^4.1.5",
@@ -27,7 +27,7 @@
    "@types/node": "^18.6.5",
    "@types/react": "^18.0.17",
    "@types/react-dom": "^18.0.6",
    "@uiw/react-md-editor": "^3.19.5",
    "@uiw/react-md-editor": "^3.20.2",
    "autoprefixer": "10.4.8",
    "axios": "^0.27.2",
    "bootstrap": "^5.2.0",

@@ -53,11 +53,9 @@
    "react-dom": "^18.2.0",
    "react-icons": "^4.4.0",
    "react-jsonschema-form": "^1.8.1",
    "react-markdown": "^8.0.3",
    "react-router": "^6.3.0",
    "react-router-dom": "^6.3.0",
    "react-scripts": "^5.0.1",
    "remark-gfm": "^3.0.1",
    "serve": "^14.0.0",
    "timepicker": "^1.13.18",
    "typescript": "^4.7.4",
@@ -35,7 +35,7 @@ export default function ProcessGroupForm({
  };

  const hasValidIdentifier = (identifierToCheck: string) => {
    return identifierToCheck.match(/^[a-z0-9][0-9a-z-]+[a-z0-9]$/);
    return identifierToCheck.match(/^[a-z0-9][0-9a-z-]*[a-z0-9]$/);
  };

  const handleFormSubmission = (event: any) => {
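The quantifier change from + to * matters for short identifiers: with +, the
pattern demanded at least three characters, so a two-character identifier
like "ab" was rejected. A quick check of the new pattern in Python, with
invented examples:

import re

pattern = re.compile(r"^[a-z0-9][0-9a-z-]*[a-z0-9]$")
print(bool(pattern.match("ab")))     # True; the old +-based pattern said False
print(bool(pattern.match("a-b-c")))  # True
print(bool(pattern.match("-ab")))    # False: must start and end alphanumeric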
@@ -12,8 +12,7 @@ import {
  // @ts-ignore
} from '@carbon/react';

import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import MDEditor from '@uiw/react-md-editor';
// eslint-disable-next-line import/no-named-as-default
import Form from '../themes/carbon';
import HttpService from '../services/HttpService';

@@ -26,6 +25,7 @@ export default function TaskShow() {
  const [userTasks, setUserTasks] = useState(null);
  const params = useParams();
  const navigate = useNavigate();
  const [disabled, setDisabled] = useState(false);

  const { addError, removeError } = useAPIError();

@@ -40,8 +40,13 @@ export default function TaskShow() {
    // instead of passing the process model identifier in through the params
    HttpService.makeCallToBackend({
      path: url,
      successCallback: setUserTasks,
      onUnauthorized: () => {},
      successCallback: (tasks: any) => {
        setDisabled(false);
        setUserTasks(tasks);
      },
      onUnauthorized: () => {
        setDisabled(false);
      },
      failureCallback: (error: any) => {
        addError(error);
      },

@@ -68,13 +73,20 @@ export default function TaskShow() {
  };

  const handleFormSubmit = (event: any) => {
    if (disabled) {
      return;
    }
    setDisabled(true);
    removeError();
    const dataToSubmit = event.formData;
    delete dataToSubmit.isManualTask;
    HttpService.makeCallToBackend({
      path: `/tasks/${params.process_instance_id}/${params.task_id}`,
      successCallback: processSubmitResult,
      failureCallback: addError,
      failureCallback: (error: any) => {
        addError(error);
        setDisabled(false);
      },
      httpMethod: 'PUT',
      postBody: dataToSubmit,
    });

@@ -189,7 +201,7 @@ export default function TaskShow() {
      },
    };
  } else if (task.form_ui_schema) {
    formUiSchema = JSON.parse(task.form_ui_schema);
    formUiSchema = task.form_ui_schema;
  }
  if (task.state !== 'READY') {
    formUiSchema = Object.assign(formUiSchema || {}, {

@@ -203,10 +215,16 @@ export default function TaskShow() {
    reactFragmentToHideSubmitButton = <div />;
  }

  if (task.type === 'Manual Task' && task.state === 'READY') {
  if (task.state === 'READY') {
    let buttonText = 'Submit';
    if (task.type === 'Manual Task') {
      buttonText = 'Continue';
    }
    reactFragmentToHideSubmitButton = (
      <div>
        <Button type="submit">Continue</Button>
        <Button type="submit" disabled={disabled}>
          {buttonText}
        </Button>
      </div>
    );
  }

@@ -219,6 +237,7 @@ export default function TaskShow() {
        <Grid fullWidth condensed>
          <Column sm={4} md={5} lg={8}>
            <Form
              disabled={disabled}
              formData={taskData}
              onSubmit={handleFormSubmit}
              schema={jsonSchema}

@@ -243,9 +262,7 @@ export default function TaskShow() {
    }
    return (
      <div className="markdown">
        <ReactMarkdown remarkPlugins={[remarkGfm]}>
          {instructions}
        </ReactMarkdown>
        <MDEditor.Markdown source={instructions} />
      </div>
    );
  };
@@ -32,9 +32,6 @@ const RadioWidget = ({

  return (
    <>
      <FormLabel required={required} htmlFor={id}>
        {label || schema.title}
      </FormLabel>
      <RadioGroup
        id={id}
        name={id}