Merge branch 'main' into feature/more_better_errors

Commit: 009a5b1768
@@ -10,7 +10,6 @@ set -o errtrace -o errexit -o nounset -o pipefail
 for subtree in "SpiffWorkflow" \
   "spiffworkflow-backend" \
   "spiffworkflow-frontend" \
-  "flask-bpmn" \
   "bpmn-js-spiffworkflow" \
   "connector-proxy-demo"
 do
@@ -1760,7 +1760,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331"
+resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"

 [[package]]
 name = "sqlalchemy"
@@ -1,28 +1,43 @@
-FROM ghcr.io/sartography/python:3.11
+# Base image to share ENV vars that activate VENV.
+FROM ghcr.io/sartography/python:3.11 AS base
+
+ENV VIRTUAL_ENV=/app/venv
+RUN python3 -m venv $VIRTUAL_ENV
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+
+WORKDIR /app
 
+# base plus packages needed for deployment. Could just install these in final, but then we can't cache as much.
+FROM base AS deployment
+
+RUN apt-get update \
+  && apt-get clean -y \
+  && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \
+  && rm -rf /var/lib/apt/lists/*
+
+# Setup image for installing Python dependencies.
+FROM base AS setup
+
 RUN pip install poetry
 RUN useradd _gunicorn --no-create-home --user-group
 
-RUN apt-get update && \
-  apt-get install -y -q \
-  gcc libssl-dev \
-  curl git-core libpq-dev \
-  gunicorn3 default-mysql-client
+RUN apt-get update \
+  && apt-get install -y -q gcc libssl-dev libpq-dev
 
-WORKDIR /app
+# poetry install takes a long time and can be cached if dependencies don't change,
+# so that's why we tolerate running it twice.
 COPY pyproject.toml poetry.lock /app/
 RUN poetry install --without dev
 
-RUN set -xe \
-  && apt-get remove -y gcc python3-dev libssl-dev \
-  && apt-get autoremove -y \
-  && apt-get clean -y \
-  && rm -rf /var/lib/apt/lists/*
-
-COPY . /app/
-
-# run poetry install again AFTER copying the app into the image
-# otherwise it does not know what the main app module is
+COPY . /app
 RUN poetry install --without dev
 
-CMD ./bin/boot_server_in_docker
+# Final image without setup dependencies.
+FROM deployment AS final
+
+LABEL source="https://github.com/sartography/spiff-arena"
+LABEL description="Software development platform for building, running, and monitoring executable diagrams"
+
+COPY --from=setup /app /app
+
+CMD ["./bin/boot_server_in_docker"]
@@ -7,7 +7,13 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail
 
+# you can get a list of users from the keycloak realm file like:
+# grep '"email" :' keycloak/realm_exports/spiffworkflow-realm.json | awk -F : '{print $2}' | sed -E 's/ "//g' | sed -E 's/",//g' > s
+
+# we keep some of these in keycloak/test_user_lists
+# spiffworkflow-realm.json is a mashup of the status and sartography user lists.
 user_file_with_one_email_per_line="${1:-}"
 
 keycloak_realm="${2:-spiffworkflow}"
 if [[ -z "${1:-}" ]]; then
   >&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]"
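Note: the grep/awk pipeline in the comment above pulls user email addresses out of the realm export. A rough Python equivalent is sketched below; it assumes the standard Keycloak realm-export layout with a top-level "users" array, and the path is only illustrative.

    # sketch: list user emails from a Keycloak realm export (not part of this commit)
    import json

    with open("keycloak/realm_exports/spiffworkflow-realm.json") as f:
        realm = json.load(f)

    # "users" is the usual top-level key in a Keycloak realm export
    for user in realm.get("users", []):
        email = user.get("email")
        if email:
            print(email)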
@@ -21,6 +21,9 @@ docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_pa
 docker cp "keycloak:${docker_container_path}" "$local_tmp_dir"
 
 for realm in $realms ; do
+  if ! grep -Eq '\-realm$' <<< "$realm"; then
+    realm="${realm}-realm"
+  fi
   cp "${local_tmp_dir}/hey/${realm}.json" "${script_dir}/../realm_exports/"
 done
@@ -547,7 +547,7 @@
 "enabled" : true,
 "totp" : false,
 "emailVerified" : false,
-"email" : "kevin@sartography.com",
+"email" : "kb@sartography.com",
 "credentials" : [ {
 "id" : "4057e784-689d-47c0-a164-035a69e78edf",
 "type" : "password",
@@ -854,6 +854,46 @@
 "realmRoles" : [ "default-roles-spiffworkflow" ],
 "notBefore" : 0,
 "groups" : [ ]
+}, {
+"id" : "672167fd-ae79-47a7-8429-f3bb1bd4ee55",
+"createdTimestamp" : 1675349217829,
+"username" : "infra1.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "infra1.sme@status.im",
+"credentials" : [ {
+"id" : "bd5843bf-98cc-4891-ab03-693a5d69078b",
+"type" : "password",
+"createdDate" : 1675349217863,
+"secretData" : "{\"value\":\"A78sm/+e2x/N/3A7Pk05eKhfANp+ZO9BQA3LYMwpzQ5KK2D/Ot8d1plOnqMT61rTnnCgxP8dtlA6/Ws61CMTYg==\",\"salt\":\"XOOknamJPwXD1LDj6LEodA==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "40891b68-121f-4fdb-86c0-0f52836d7e65",
+"createdTimestamp" : 1675349217890,
+"username" : "infra2.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "infra2.sme@status.im",
+"credentials" : [ {
+"id" : "7e9927e2-ef7f-4247-b663-1f59147a9066",
+"type" : "password",
+"createdDate" : 1675349217926,
+"secretData" : "{\"value\":\"j4M9u8p9FDCitGpb7JXM9JWFVGvBu7R2TOYG79c+Witl7gfWppues9fFzhlFyXgC78v6diHoQ4LwCwJGJS3loQ==\",\"salt\":\"H+i8qv6ulrBEZla/v8gDDw==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
 }, {
 "id" : "1561518b-c327-491e-9db3-23c2b5394104",
 "createdTimestamp" : 1669303773974,
@@ -863,7 +903,7 @@
 "emailVerified" : false,
 "firstName" : "",
 "lastName" : "",
-"email" : "j@status.im",
+"email" : "j@sartography.com",
 "credentials" : [ {
 "id" : "e71ec785-9133-4b7d-8015-1978379af0bb",
 "type" : "password",
@@ -1043,6 +1083,86 @@
 "realmRoles" : [ "default-roles-spiffworkflow" ],
 "notBefore" : 0,
 "groups" : [ ]
+}, {
+"id" : "e911fb0f-fd07-4886-acbf-d00930d293d3",
+"createdTimestamp" : 1675447845512,
+"username" : "legal.program-lead",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "legal.program-lead@status.im",
+"credentials" : [ {
+"id" : "9676d8d3-1e8c-4f5d-b5f7-49745cecf8fd",
+"type" : "password",
+"createdDate" : 1675447845577,
+"secretData" : "{\"value\":\"vTffScfGXIjWWyDDfzo7JPiJe9VjAtrmds382EeV7N+wYNapJmLTVModkBsmGPy4TmWLc9BoysQynOaanSGi9Q==\",\"salt\":\"67ZxTEnar8aq4LZLhSNTFg==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "eff82d12-9a67-4002-b3c5-37811bd45199",
+"createdTimestamp" : 1675349217585,
+"username" : "legal.program-lead.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "legal.program-lead.sme@status.im",
+"credentials" : [ {
+"id" : "933e3fc4-398a-46c3-bc4d-783ab29a0a5b",
+"type" : "password",
+"createdDate" : 1675349217655,
+"secretData" : "{\"value\":\"x2M9khnGK+VCykoWbZKEcHNv5QMAcumqLa7+o+STJV8UYt7BobSBn7w1r3cbyYlvkgoWIglG8S2nLDFFb6hAQg==\",\"salt\":\"/lQYRrsUY1BxNUOZSKaZwA==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "4ed2b5a2-16c2-4029-ae97-d75c60f2147f",
+"createdTimestamp" : 1675447845616,
+"username" : "legal.project-lead",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "legal.project-lead@status.im",
+"credentials" : [ {
+"id" : "fd0b0d0a-8a3e-48c9-b17b-023e87057048",
+"type" : "password",
+"createdDate" : 1675447845652,
+"secretData" : "{\"value\":\"l/DPfNBcHINV8lCf9nEyCJkFvaMGnLqcd1Y8t9taLqxb8r/ofY2ce79C19JCHDQJXRPRuCsMoobuFhhNR6aQmg==\",\"salt\":\"2ivCPrNc56396ldlwpQP6Q==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "8cd6feba-5ca6-4cfb-bc1a-a52c80595783",
+"createdTimestamp" : 1675349217698,
+"username" : "legal.project-lead.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "legal.project-lead.sme@status.im",
+"credentials" : [ {
+"id" : "908f858c-d3cd-47a9-b611-a1d48f0247e5",
+"type" : "password",
+"createdDate" : 1675349217733,
+"secretData" : "{\"value\":\"r53SXu0dp6FrSJAVLHYrfwSKPZY9OKHfHBuJDEE2DCbZiQRH77C4sZWfUwbu/6OOhTtiBEe7gz2DQpimIDY4RQ==\",\"salt\":\"+g/OXXJEMkQiahmjSylAkw==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
 }, {
 "id" : "2a3176a0-8dd5-4223-a3e1-3cac4134e474",
 "createdTimestamp" : 1674148695030,
@@ -1063,6 +1183,46 @@
 "realmRoles" : [ "default-roles-spiffworkflow" ],
 "notBefore" : 0,
 "groups" : [ ]
+}, {
+"id" : "3d62ca4e-88bc-4302-89c1-8741c771147e",
+"createdTimestamp" : 1675349217762,
+"username" : "legal1.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "legal1.sme@status.im",
+"credentials" : [ {
+"id" : "b774d46d-a3e8-417f-97c6-2d2102a54b0b",
+"type" : "password",
+"createdDate" : 1675349217799,
+"secretData" : "{\"value\":\"PF21YsnIoYZLJFT/y1i2FV4OmaQj8dRsalZ9R2PK6t/jKze3ds4k+I7WVe4h2H0hMB9fo9cSQ7kt2ygxfEBheg==\",\"salt\":\"5sOkSXzRSgNz7lHfUbKzdQ==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "99ce8a54-2941-4767-8ddf-52320b3708bd",
+"createdTimestamp" : 1675447085191,
+"username" : "madhurya",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "madhurya@sartography.com",
+"credentials" : [ {
+"id" : "4fa2bf1f-188e-42e3-9633-01d436864206",
+"type" : "password",
+"createdDate" : 1675447085252,
+"secretData" : "{\"value\":\"6ZApQ7kx4YDc5ojW9eyFiSKMz5l3/Zl5PIScHEW1gtP3lrnnWqWgwcP+8cWkKdm3im+XrZwDQHjuGjGN5Rbjyw==\",\"salt\":\"HT3fCh245v8etRFIprXsyw==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
 }, {
 "id" : "6f5bfa09-7494-4a2f-b871-cf327048cac7",
 "createdTimestamp" : 1665517010600,
@@ -1185,6 +1345,86 @@
 "realmRoles" : [ "default-roles-spiffworkflow" ],
 "notBefore" : 0,
 "groups" : [ ]
+}, {
+"id" : "9f703c96-02f1-403c-b070-25feb86cfe21",
+"createdTimestamp" : 1675447845811,
+"username" : "ppg.ba.program-lead",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "ppg.ba.program-lead@status.im",
+"credentials" : [ {
+"id" : "bf74118b-b28f-4d2f-8bfa-7b9d1a8345f2",
+"type" : "password",
+"createdDate" : 1675447845847,
+"secretData" : "{\"value\":\"wFUAB6E98gE222nCfsKe6P3kSZxeOSjhflsxon8kw/dY4ZwN0KMwvlYuNhmoptTLqDQJyqUiydmlMK0NS4JjTQ==\",\"salt\":\"YCPk4Tc3eXcoes78oLhDEg==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "81a1727b-c846-4af9-8d95-1c50b1deb0d5",
+"createdTimestamp" : 1675447845879,
+"username" : "ppg.ba.project-lead",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "ppg.ba.project-lead@status.im",
+"credentials" : [ {
+"id" : "6411830d-6015-4cf2-bac6-d49c26510319",
+"type" : "password",
+"createdDate" : 1675447845915,
+"secretData" : "{\"value\":\"1+m8twycOEbA4X61zN7dLENqp2IxxQZrXKaf3mEuzmxouHrgxvmXudwC6DWyfjXvLm7gxWlaa4cofBFwr1idig==\",\"salt\":\"UEKUSScYv2xY+rJ8vlvF4A==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "1d4d471a-b3ef-4750-97c4-a9e64eb8f414",
+"createdTimestamp" : 1675447845942,
+"username" : "ppg.ba.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "ppg.ba.sme@status.im",
+"credentials" : [ {
+"id" : "6512f88a-cbcc-4d79-be17-1d132ba11e64",
+"type" : "password",
+"createdDate" : 1675447845977,
+"secretData" : "{\"value\":\"EErx/3vG+lh4DgrJUzkBv4cLT3sK1gS+T9KD5V/JpvJUmJpRFQqpk+YxC/nC/kTGLIpRDdCIN690T84FlOIjew==\",\"salt\":\"FPeVGnFbt9TRNiORMB5LMQ==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "2dade29f-c6dc-445b-bdf0-eed316bdb638",
+"createdTimestamp" : 1675447846003,
+"username" : "ppg.ba.sme1",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "ppg.ba.sme1@status.im",
+"credentials" : [ {
+"id" : "ccf2d138-020a-4a29-b63d-1f4d2f415639",
+"type" : "password",
+"createdDate" : 1675447846038,
+"secretData" : "{\"value\":\"BtSJtW/8lCtyrDPTXzhsyT/32H+pOHx9thKqJV30dOEZ9wcSQbrRSHoQbXwLos+sIiA82X3wm+qObdQoD5guVQ==\",\"salt\":\"nSbgxYpVGaMz2ArmqLCN6Q==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
 }, {
 "id" : "c3ea06ee-c497-48e6-8816-43c8ef68bd8b",
 "createdTimestamp" : 1674148694747,
@@ -1225,6 +1465,86 @@
 "realmRoles" : [ "default-roles-spiffworkflow" ],
 "notBefore" : 0,
 "groups" : [ ]
+}, {
+"id" : "c21c075d-9ac5-40a1-964a-c1d6ffe17257",
+"createdTimestamp" : 1675447845680,
+"username" : "security.program-lead",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "security.program-lead@status.im",
+"credentials" : [ {
+"id" : "d1401dbd-a88b-44a6-b13c-fff13ee07e0c",
+"type" : "password",
+"createdDate" : 1675447845718,
+"secretData" : "{\"value\":\"3D76RpIFG0/ixbSBeJfCc61kyL8PvVn/khA8FOy6RLg2hrZbs1Uwl8SmplnSUll1wD5a/BoobsO7v1XW4TCvwQ==\",\"salt\":\"YtDRRmBV4SBlO/oX23r2EQ==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "ace0432f-1818-4210-8bcf-15533abfb3ce",
+"createdTimestamp" : 1675349217958,
+"username" : "security.program-lead.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "security.program-lead.sme@status.im",
+"credentials" : [ {
+"id" : "602512dd-b24f-458c-9cef-7271bd8177bc",
+"type" : "password",
+"createdDate" : 1675349217993,
+"secretData" : "{\"value\":\"vUb+t9ukHz3oHGUxaYUP34riZrshZU4c3iWpHB0OzI3y0ggCeT9xFEcmrwdkfilkKvCBJxLswlirWmgnmxZH0w==\",\"salt\":\"0hzZkDK4hPH5xgR1TpyG1Q==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "34dfacfd-24b5-414e-ac3e-9b013399aee2",
+"createdTimestamp" : 1675447845747,
+"username" : "security.project-lead",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "security.project-lead@status.im",
+"credentials" : [ {
+"id" : "cb5d8a8a-e7d0-40e4-878b-a33608cb76c8",
+"type" : "password",
+"createdDate" : 1675447845784,
+"secretData" : "{\"value\":\"rudimVOjVwJeO/1RLuyHySEaSQMzjHqPQrh5Pmfr4L2PgP/1oDKLVB38pKOohlbTarDcbAfMHB7AFYAPn9kuIg==\",\"salt\":\"cOkkUBOx/4AVUSa3Ozsiuw==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "6272ac80-1d79-4e3c-a5c1-b31660560318",
+"createdTimestamp" : 1675349218020,
+"username" : "security.project-lead.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "security.project-lead.sme@status.im",
+"credentials" : [ {
+"id" : "eb7673bf-50f1-40af-927b-162f536f6187",
+"type" : "password",
+"createdDate" : 1675349218054,
+"secretData" : "{\"value\":\"E1eLmC7hCcv7I5X30TfMvpZv3MtHH+rVhgLrZnBJSUvsrXmRkHWScJ/POHQLwUgCLJeU/lKDP/f0TdO2PvHiow==\",\"salt\":\"dWM5XJIR7m/eZ0YlHmuC3A==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
 }, {
 "id" : "74374cda-1516-48e5-9ef2-1fd7bcee84d3",
 "createdTimestamp" : 1674148695088,
@@ -1246,13 +1566,32 @@
 "notBefore" : 0,
 "groups" : [ ]
 }, {
-"id" : "487d3a85-89dd-4839-957a-c3f6d70551f6",
-"createdTimestamp" : 1657115173081,
+"id" : "98faab0c-d2af-4794-8491-03dad5f30c63",
+"createdTimestamp" : 1675349218087,
+"username" : "security1.sme",
+"enabled" : true,
+"totp" : false,
+"emailVerified" : false,
+"email" : "security1.sme@status.im",
+"credentials" : [ {
+"id" : "37bd6b9b-015b-4790-8a4f-883c47035bc4",
+"type" : "password",
+"createdDate" : 1675349218122,
+"secretData" : "{\"value\":\"BJP9K4qIdnaDnE3meM2GLWMFdSJryxcZovtKDlZNaQXfSUH3X1mOJfaLXQsuTWJzSMIow8XZ5+ye47ZNabLCaQ==\",\"salt\":\"BqD7jPpdB7PzU6QTN5dpMA==\",\"additionalParameters\":{}}",
+"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+} ],
+"disableableCredentialTypes" : [ ],
+"requiredActions" : [ ],
+"realmRoles" : [ "default-roles-spiffworkflow" ],
+"notBefore" : 0,
+"groups" : [ ]
+}, {
+"id" : "b768e3ef-f905-4493-976c-bc3408c04bec",
+"createdTimestamp" : 1675447832524,
 "username" : "service-account-spiffworkflow-backend",
 "enabled" : true,
 "totp" : false,
 "emailVerified" : false,
-"email" : "service-account@status.im",
 "serviceAccountClientId" : "spiffworkflow-backend",
 "credentials" : [ ],
 "disableableCredentialTypes" : [ ],
@@ -1264,13 +1603,12 @@
 "notBefore" : 0,
 "groups" : [ ]
 }, {
-"id" : "22de68b1-4b06-4bc2-8da6-0c577e7e62ad",
-"createdTimestamp" : 1657055472800,
+"id" : "b6fb214b-cb8a-4403-9308-ac6d4e13ef26",
+"createdTimestamp" : 1675447832560,
 "username" : "service-account-withauth",
 "enabled" : true,
 "totp" : false,
 "emailVerified" : false,
-"email" : "service-account-withauth@status.im",
 "serviceAccountClientId" : "withAuth",
 "credentials" : [ ],
 "disableableCredentialTypes" : [ ],
@@ -2514,7 +2852,7 @@
 "subType" : "authenticated",
 "subComponents" : { },
 "config" : {
-"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-full-name-mapper" ]
+"allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper" ]
 }
 }, {
 "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@@ -2532,7 +2870,7 @@
 "subType" : "anonymous",
 "subComponents" : { },
 "config" : {
-"allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "oidc-usermodel-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper" ]
+"allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper" ]
 }
 }, {
 "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@@ -2622,7 +2960,7 @@
 "internationalizationEnabled" : false,
 "supportedLocales" : [ ],
 "authenticationFlows" : [ {
-"id" : "a91920d9-792e-486f-9a02-49fe00857ce5",
+"id" : "cb39eda2-18c2-4b03-9d7c-672a2bd47d19",
 "alias" : "Account verification options",
 "description" : "Method with which to verity the existing account",
 "providerId" : "basic-flow",
@@ -2644,7 +2982,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "6b8f504c-39fb-4608-9223-52deb5ae0dfe",
+"id" : "96d4e28f-51ad-4737-87b4-5a10484ceb8b",
 "alias" : "Authentication Options",
 "description" : "Authentication options.",
 "providerId" : "basic-flow",
@@ -2673,7 +3011,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "ac4dd6f3-43b2-4212-90eb-4df7c9a6a0bc",
+"id" : "8f4c884d-93cd-4404-bc3a-1fa717b070c5",
 "alias" : "Browser - Conditional OTP",
 "description" : "Flow to determine if the OTP is required for the authentication",
 "providerId" : "basic-flow",
@@ -2695,7 +3033,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "726b4a58-cb78-4105-a34c-3e4404c74362",
+"id" : "166d1879-dd61-4fb4-b4f6-0a4d69f49da8",
 "alias" : "Direct Grant - Conditional OTP",
 "description" : "Flow to determine if the OTP is required for the authentication",
 "providerId" : "basic-flow",
@@ -2717,7 +3055,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "be1b5f5d-b80f-46a6-804b-bce20e2de246",
+"id" : "18cab8f9-f010-4226-a86e-8da2f1632304",
 "alias" : "First broker login - Conditional OTP",
 "description" : "Flow to determine if the OTP is required for the authentication",
 "providerId" : "basic-flow",
@@ -2739,7 +3077,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "ff5097d8-818a-4176-8512-caf9d81eb6db",
+"id" : "04d8d1d1-5253-4644-b55d-8c9317818b33",
 "alias" : "Handle Existing Account",
 "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
 "providerId" : "basic-flow",
@@ -2761,7 +3099,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "b9ecf989-e87b-45c0-a440-bce46b473dec",
+"id" : "2bf21e1d-ff7e-4d52-8be7-31355945c302",
 "alias" : "Reset - Conditional OTP",
 "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
 "providerId" : "basic-flow",
@@ -2783,7 +3121,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "4554310c-e125-4834-a84e-53bbec7a79d6",
+"id" : "fa8636a5-9969-41a5-9fef-9c825cceb819",
 "alias" : "User creation or linking",
 "description" : "Flow for the existing/non-existing user alternatives",
 "providerId" : "basic-flow",
@@ -2806,7 +3144,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "204549aa-c931-45a2-b2f0-1a5a0c724935",
+"id" : "8656a884-6645-40b5-b075-c40736e27811",
 "alias" : "Verify Existing Account by Re-authentication",
 "description" : "Reauthentication of existing account",
 "providerId" : "basic-flow",
@@ -2828,7 +3166,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "d02f58b1-6469-46ea-a348-d923b5aa9727",
+"id" : "0d88d334-bfa4-4cf1-9fa3-17d0df0151d1",
 "alias" : "browser",
 "description" : "browser based authentication",
 "providerId" : "basic-flow",
@@ -2864,7 +3202,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "7ef6a658-be09-4b81-91ac-f21dc80b0841",
+"id" : "9b195d67-e3e6-4983-8607-533b739ebd97",
 "alias" : "clients",
 "description" : "Base authentication for clients",
 "providerId" : "client-flow",
@@ -2900,7 +3238,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "f7f2eeab-6455-4a18-a98d-b1a5f04e35fb",
+"id" : "fd0273a1-f6f4-4df1-a057-54ac4e91f4a9",
 "alias" : "direct grant",
 "description" : "OpenID Connect Resource Owner Grant",
 "providerId" : "basic-flow",
@@ -2929,7 +3267,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "c44389c2-08b2-4adb-a6e9-e41006cb20c7",
+"id" : "b457cba8-ef31-473b-a481-c095b2f4eb48",
 "alias" : "docker auth",
 "description" : "Used by Docker clients to authenticate against the IDP",
 "providerId" : "basic-flow",
@@ -2944,7 +3282,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "edf00de8-8f19-4a32-98c4-15e719c1fadd",
+"id" : "97519504-fd69-4c08-bd27-15d26fbc9b76",
 "alias" : "first broker login",
 "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
 "providerId" : "basic-flow",
@@ -2967,7 +3305,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "58415605-eb47-41b3-a07f-90bbbbcb9963",
+"id" : "fc6a4468-1a78-410d-ac97-cf9f05814850",
 "alias" : "forms",
 "description" : "Username, password, otp and other auth forms.",
 "providerId" : "basic-flow",
@@ -2989,7 +3327,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "1eae6099-3e1e-484b-ad94-b09339affb68",
+"id" : "97a25d8a-25a0-4bf4-be6d-a6f019cf3a32",
 "alias" : "http challenge",
 "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
 "providerId" : "basic-flow",
@@ -3011,7 +3349,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "8af03739-b77a-4582-ab63-a1855ca4f637",
+"id" : "671e8ec7-af31-4c54-b6bb-96ebe69881de",
 "alias" : "registration",
 "description" : "registration flow",
 "providerId" : "basic-flow",
@@ -3027,7 +3365,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "0c308998-c5ad-4cf8-ab5c-15be89cbe4d7",
+"id" : "24d6aaaa-5202-4401-99c3-bb15925bd5be",
 "alias" : "registration form",
 "description" : "registration form",
 "providerId" : "form-flow",
@@ -3063,7 +3401,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "5510aa65-e78d-4d08-a3ca-31e277bc3cd0",
+"id" : "f948bd43-ff05-4245-be30-a0a0dad2b7f0",
 "alias" : "reset credentials",
 "description" : "Reset credentials for a user if they forgot their password or something",
 "providerId" : "basic-flow",
@@ -3099,7 +3437,7 @@
 "userSetupAllowed" : false
 } ]
 }, {
-"id" : "b6b3e35d-8df3-487e-b2d2-9fdf524a4181",
+"id" : "7e4aaea7-05ca-4aa0-b934-4c81614620a8",
 "alias" : "saml ecp",
 "description" : "SAML ECP Profile Authentication Flow",
 "providerId" : "basic-flow",
@@ -3115,13 +3453,13 @@
 } ]
 } ],
 "authenticatorConfig" : [ {
-"id" : "a2e9294b-74ce-4ea6-8372-9d9fb3d60a06",
+"id" : "14ca1058-25e7-41f6-85ce-ad0bfce2c67c",
 "alias" : "create unique user config",
 "config" : {
 "require.password.update.after.registration" : "false"
 }
 }, {
-"id" : "de65a90c-cc4b-4bf0-8e84-756e23a504f0",
+"id" : "16803de1-f7dc-4293-acde-fd0eae264377",
 "alias" : "review profile config",
 "config" : {
 "update.profile.on.first.login" : "missing"
@@ -3216,4 +3554,4 @@
 "clientPolicies" : {
 "policies" : [ ]
 }
 }
@@ -1,8 +1,13 @@
+admin@spiffworkflow.org
 alex@sartography.com
 dan@sartography.com
-kevin@sartography.com
-jason@sartography.com
-mike@sartography.com
+daniel@sartography.com
 elizabeth@sartography.com
+j@sartography.com
+jason@sartography.com
 jon@sartography.com
+kb@sartography.com
+kevin@sartography.com
+madhurya@sartography.com
+mike@sartography.com
 natalia@sartography.com
@@ -1,17 +1,46 @@
+admin@spiffworkflow.org
+amir@status.im
+app.program.lead@status.im
+core@status.im
+dao.project.lead@status.im
+desktop.program.lead@status.im
+desktop.project.lead@status.im
+fin1@status.im
+fin@status.im
 finance.lead@status.im
-legal.lead@status.im
-program.lead@status.im
-services.lead@status.im
 finance.sme@status.im
-infra.sme@status.im
-legal.sme@status.im
-security.sme@status.im
-ppg.ba@status.im
-peopleops.partner@status.im
-peopleops.talent@status.im
+finance_user1@status.im
+harmeet@status.im
 infra.program-lead@status.im
 infra.project-lead@status.im
-dao.project.lead@status.im
-desktop.project.lead@status.im
-app.program.lead@status.im
-desktop.program.lead@status.im
+infra.sme@status.im
+infra1.sme@status.im
+infra2.sme@status.im
+jakub@status.im
+jarrad@status.im
+lead1@status.im
+lead@status.im
+legal.lead@status.im
+legal.program-lead.sme@status.im
+legal.program-lead@status.im
+legal.project-lead.sme@status.im
+legal.project-lead@status.im
+legal.sme@status.im
+legal1.sme@status.im
+manuchehr@status.im
+peopleops.partner@status.im
+peopleops.talent@status.im
+ppg.ba.program-lead@status.im
+ppg.ba.project-lead@status.im
+ppg.ba.sme1@status.im
+ppg.ba.sme@status.im
+ppg.ba@status.im
+program.lead@status.im
+sasha@status.im
+security.program-lead.sme@status.im
+security.program-lead@status.im
+security.project-lead.sme@status.im
+security.project-lead@status.im
+security.sme@status.im
+security1.sme@status.im
+services.lead@status.im
@@ -1825,7 +1825,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331"
+resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"

 [[package]]
 name = "SQLAlchemy"
@@ -157,6 +157,29 @@ def get_hacked_up_app_for_script() -> flask.app.Flask:
     return app
 
 
+def traces_sampler(sampling_context: Any) -> Any:
+    # always inherit
+    if sampling_context["parent_sampled"] is not None:
+        return sampling_context["parent_sampled"]
+
+    if "wsgi_environ" in sampling_context:
+        wsgi_environ = sampling_context["wsgi_environ"]
+        path_info = wsgi_environ.get("PATH_INFO")
+        request_method = wsgi_environ.get("REQUEST_METHOD")
+
+        # tasks_controller.task_submit
+        # this is the current pain point as of 31 jan 2023.
+        if (
+            path_info
+            and path_info.startswith("/v1.0/tasks/")
+            and request_method == "PUT"
+        ):
+            return 1
+
+    # Default sample rate for all others (replaces traces_sample_rate)
+    return 0.01
+
+
 def configure_sentry(app: flask.app.Flask) -> None:
     """Configure_sentry."""
     import sentry_sdk
@@ -193,5 +216,10 @@ def configure_sentry(app: flask.app.Flask) -> None:
         # of transactions for performance monitoring.
         # We recommend adjusting this value to less than 1(00%) in production.
         traces_sample_rate=float(sentry_traces_sample_rate),
+        traces_sampler=traces_sampler,
+        # The profiles_sample_rate setting is relative to the traces_sample_rate setting.
+        _experiments={
+            "profiles_sample_rate": 1,
+        },
         before_send=before_send,
     )
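Note: traces_sampler above makes its decision from the WSGI environ that Sentry hands it, so it can be exercised directly with hand-built contexts. A minimal sketch, mirroring the logic added in this commit (the dict values are illustrative, not taken from a real request):

    # sketch: sanity-checking traces_sampler with fake sampling contexts
    submit_ctx = {
        "parent_sampled": None,
        "wsgi_environ": {"PATH_INFO": "/v1.0/tasks/42/abc", "REQUEST_METHOD": "PUT"},
    }
    other_ctx = {
        "parent_sampled": None,
        "wsgi_environ": {"PATH_INFO": "/v1.0/process-instances", "REQUEST_METHOD": "GET"},
    }
    assert traces_sampler(submit_ctx) == 1      # task submits are always traced
    assert traces_sampler(other_ctx) == 0.01    # everything else at the default rate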
@@ -1605,6 +1605,45 @@ paths:
             schema:
               $ref: "#/components/schemas/Workflow"
 
+  /process-data-file-download/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: process_data_identifier
+        in: path
+        required: true
+        description: The identifier of the process data.
+        schema:
+          type: string
+      - name: index
+        in: query
+        required: false
+        description: The optional index of the value if key's value is an array
+        schema:
+          type: integer
+    get:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_file_download
+      summary: Download the file referneced in the process data value.
+      tags:
+        - Data Objects
+      responses:
+        "200":
+          description: Fetch succeeded.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
   /send-event/{modified_process_model_identifier}/{process_instance_id}:
     parameters:
       - name: modified_process_model_identifier
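Note: a minimal client call against the new download endpoint might look like the sketch below. The /v1.0 prefix, host, port, identifiers, and bearer-token auth are assumptions for illustration only; the diff itself does not pin them down.

    # sketch: download a file stored in a process data object (all values illustrative)
    import requests

    url = (
        "http://localhost:7000/v1.0/process-data-file-download/"
        "my-group:my-model/42/my_file_data"
    )
    response = requests.get(
        url,
        params={"index": 0},  # only needed when the data value is an array
        headers={"Authorization": "Bearer <token>"},
    )
    response.raise_for_status()
    with open("downloaded_file", "wb") as f:
        f.write(response.content)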
@@ -82,13 +82,17 @@ def setup_config(app: Flask) -> None:
     app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
 
     app.config["PERMISSIONS_FILE_FULLPATH"] = None
-    if app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]:
+    permissions_file_name = app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]
+    if permissions_file_name is not None:
         app.config["PERMISSIONS_FILE_FULLPATH"] = os.path.join(
             app.root_path,
             "config",
             "permissions",
-            app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
+            permissions_file_name,
         )
+        print(f"base_permissions: loaded permissions file: {permissions_file_name}")
+    else:
+        print("base_permissions: no permissions file loaded")
 
     # unversioned (see .gitignore) config that can override everything and include secrets.
     # src/spiffworkflow_backend/config/secrets.py
@@ -69,6 +69,8 @@ GIT_BRANCH = environ.get("GIT_BRANCH")
 GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL")
 GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"
 GIT_SSH_PRIVATE_KEY = environ.get("GIT_SSH_PRIVATE_KEY")
+GIT_USERNAME = environ.get("GIT_USERNAME")
+GIT_USER_EMAIL = environ.get("GIT_USER_EMAIL")
 
 # Datbase Configuration
 SPIFF_DATABASE_TYPE = environ.get(
@@ -21,6 +21,11 @@ from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.specs.base import TaskSpec # type: ignore
 from SpiffWorkflow.task import Task # type: ignore
 
+from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
+from spiffworkflow_backend.services.authentication_service import TokenInvalidError
+from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
+from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
+
 
 api_error_blueprint = Blueprint("api_error_blueprint", __name__)
 
@@ -169,13 +174,30 @@ def set_user_sentry_context() -> None:
     set_tag("username", username)
 
 
+def should_notify_sentry(exception: Exception) -> bool:
+    """Determine if we should notify sentry.
+
+    We want to capture_exception to log the exception to sentry, but we don't want to log:
+    1. ApiErrors that are just invalid tokens
+    2. NotAuthorizedError. we usually call check-permissions before calling an API to
+       make sure we'll have access, but there are some cases
+       where it's more convenient to just make the call from the frontend and handle the 403 appropriately.
+    """
+    if isinstance(exception, ApiError):
+        if exception.error_code == "invalid_token":
+            return False
+    if isinstance(exception, NotAuthorizedError):
+        return False
+    return True
+
+
 @api_error_blueprint.app_errorhandler(Exception) # type: ignore
 def handle_exception(exception: Exception) -> flask.wrappers.Response:
     """Handles unexpected exceptions."""
     set_user_sentry_context()
 
     sentry_link = None
-    if not isinstance(exception, ApiError) or exception.error_code != "invalid_token":
+    if should_notify_sentry(exception):
         id = capture_exception(exception)
 
         if isinstance(exception, ApiError):
@@ -191,22 +213,41 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
                 f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
             )
 
         # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
         # seems to break the sentry sdk context where we no longer get back
         # an event id or send out tags like username
         current_app.logger.exception(exception)
+    else:
+        current_app.logger.warning(
+            f"Received exception: {exception}. Since we do not want this particular"
+            " exception in sentry, we cannot use logger.exception or logger.error, so"
+            " there will be no backtrace. see api_error.py"
+        )
+
+    error_code = "internal_server_error"
+    status_code = 500
+    if (
+        isinstance(exception, NotAuthorizedError)
+        or isinstance(exception, TokenNotProvidedError)
+        or isinstance(exception, TokenInvalidError)
+    ):
+        error_code = "not_authorized"
+        status_code = 403
+    if isinstance(exception, UserNotLoggedInError):
+        error_code = "not_authenticated"
+        status_code = 401
+
     # set api_exception like this to avoid confusing mypy
-    # and what type the object is
+    # about what type the object is
     api_exception = None
     if isinstance(exception, ApiError):
         api_exception = exception
     else:
         api_exception = ApiError(
-            error_code="internal_server_error",
+            error_code=error_code,
             message=f"{exception.__class__.__name__}",
             sentry_link=sentry_link,
-            status_code=500,
+            status_code=status_code,
         )
 
     return make_response(jsonify(api_exception), api_exception.status_code)
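Note: with the mapping above, authorization failures now come back as 403/not_authorized and missing logins as 401/not_authenticated instead of a blanket 500. A caller could branch on the serialized ApiError roughly as sketched here; the field names and URL are assumptions based on the jsonify(api_exception) call, so adjust to the actual response schema.

    # sketch: interpreting the error payload produced by handle_exception
    import requests

    response = requests.get("http://localhost:7000/v1.0/process-instances")  # illustrative URL
    if response.status_code in (401, 403):
        payload = response.json()
        if payload.get("error_code") == "not_authenticated":
            print("send the user to login")
        elif payload.get("error_code") == "not_authorized":
            print("show a permission-denied message instead of a generic error page")
    elif not response.ok:
        print(f"unexpected error: {response.text}")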
@@ -1,7 +1,9 @@
 """APIs for dealing with process groups, process models, and process instances."""
+import base64
 import json
 from typing import Any
 from typing import Dict
+from typing import Optional
 
 import flask.wrappers
 from flask import Blueprint
@@ -81,10 +83,12 @@ def process_list() -> Any:
     return SpecReferenceSchema(many=True).dump(references)
 
 
-def process_data_show(
+def _process_data_fetcher(
     process_instance_id: int,
     process_data_identifier: str,
     modified_process_model_identifier: str,
+    download_file_data: bool,
+    index: Optional[int] = None,
 ) -> flask.wrappers.Response:
     """Process_data_show."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
@@ -94,6 +98,26 @@
     if process_data_identifier in all_process_data:
         process_data_value = all_process_data[process_data_identifier]
 
+    if process_data_value is not None and index is not None:
+        process_data_value = process_data_value[index]
+
+    if (
+        download_file_data
+        and isinstance(process_data_value, str)
+        and process_data_value.startswith("data:")
+    ):
+        parts = process_data_value.split(";")
+        mimetype = parts[0][4:]
+        filename = parts[1]
+        base64_value = parts[2].split(",")[1]
+        file_contents = base64.b64decode(base64_value)
+
+        return Response(
+            file_contents,
+            mimetype=mimetype,
+            headers={"Content-disposition": f"attachment; filename={filename}"},
+        )
+
     return make_response(
         jsonify(
             {
@ -105,6 +129,37 @@ def process_data_show(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def process_data_show(
|
||||||
|
process_instance_id: int,
|
||||||
|
process_data_identifier: str,
|
||||||
|
modified_process_model_identifier: str,
|
||||||
|
) -> flask.wrappers.Response:
|
||||||
|
"""Process_data_show."""
|
||||||
|
return _process_data_fetcher(
|
||||||
|
process_instance_id,
|
||||||
|
process_data_identifier,
|
||||||
|
modified_process_model_identifier,
|
||||||
|
False,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def process_data_file_download(
|
||||||
|
process_instance_id: int,
|
||||||
|
process_data_identifier: str,
|
||||||
|
modified_process_model_identifier: str,
|
||||||
|
index: Optional[int] = None,
|
||||||
|
) -> flask.wrappers.Response:
|
||||||
|
"""Process_data_file_download."""
|
||||||
|
return _process_data_fetcher(
|
||||||
|
process_instance_id,
|
||||||
|
process_data_identifier,
|
||||||
|
modified_process_model_identifier,
|
||||||
|
True,
|
||||||
|
index,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
# sample body:
|
# sample body:
|
||||||
# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
|
# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
|
||||||
# "full_name": "sartography/sample-process-models", "private": False .... }}
|
# "full_name": "sartography/sample-process-models", "private": False .... }}
|
||||||
|
|
|
@@ -10,6 +10,7 @@ from typing import Union

 import flask.wrappers
 import jinja2
+import sentry_sdk
 from flask import current_app
 from flask import g
 from flask import jsonify
@@ -326,13 +327,12 @@ def process_data_show(
     )


-def task_submit(
+def task_submit_shared(
     process_instance_id: int,
     task_id: str,
     body: Dict[str, Any],
     terminate_loop: bool = False,
 ) -> flask.wrappers.Response:
-    """Task_submit_user_data."""
     principal = _find_principal_or_raise()
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     if not process_instance.can_submit_task():
@@ -380,15 +380,16 @@ def task_submit(
         )
     )

-    processor.lock_process_instance("Web")
-    ProcessInstanceService.complete_form_task(
-        processor=processor,
-        spiff_task=spiff_task,
-        data=body,
-        user=g.user,
-        human_task=human_task,
-    )
-    processor.unlock_process_instance("Web")
+    with sentry_sdk.start_span(op="task", description="complete_form_task"):
+        processor.lock_process_instance("Web")
+        ProcessInstanceService.complete_form_task(
+            processor=processor,
+            spiff_task=spiff_task,
+            data=body,
+            user=g.user,
+            human_task=human_task,
+        )
+        processor.unlock_process_instance("Web")

     # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same
     # task spec, complete that form as well.
@@ -417,6 +418,19 @@ def task_submit(
     return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")


+def task_submit(
+    process_instance_id: int,
+    task_id: str,
+    body: Dict[str, Any],
+    terminate_loop: bool = False,
+) -> flask.wrappers.Response:
+    """Task_submit_user_data."""
+    with sentry_sdk.start_span(
+        op="controller_action", description="tasks_controller.task_submit"
+    ):
+        return task_submit_shared(process_instance_id, task_id, body, terminate_loop)
+
+
 def _get_tasks(
     processes_started_by_user: bool = True,
     has_lane_assignment_id: bool = True,

@@ -17,6 +17,7 @@ from flask import request
 from werkzeug.wrappers import Response

 from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authentication_service import AuthenticationService
 from spiffworkflow_backend.services.authentication_service import (
@@ -58,6 +59,10 @@ def verify_token(
     if not token and "Authorization" in request.headers:
         token = request.headers["Authorization"].removeprefix("Bearer ")

+    if not token and "access_token" in request.cookies:
+        if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/"):
+            token = request.cookies["access_token"]
+
     # This should never be set here but just in case
     _clear_auth_tokens_from_thread_local_data()

@@ -96,7 +101,7 @@ def verify_token(
             )
             if auth_token and "error" not in auth_token:
                 tld = current_app.config["THREAD_LOCAL_DATA"]
-                tld.new_access_token = auth_token["access_token"]
+                tld.new_access_token = auth_token["id_token"]
                 tld.new_id_token = auth_token["id_token"]
                 # We have the user, but this code is a bit convoluted, and will later demand
                 # a user_info object so it can look up the user. Sorry to leave this crap here.
@@ -186,6 +191,7 @@ def set_new_access_token_in_cookie(
 ):
     domain_for_frontend_cookie = None

+    # fixme - we should not be passing the access token back to the client
     if hasattr(tld, "new_access_token") and tld.new_access_token:
         response.set_cookie(
             "access_token", tld.new_access_token, domain=domain_for_frontend_cookie
@@ -254,7 +260,7 @@ def parse_id_token(token: str) -> Any:
     return json.loads(decoded)


-def login_return(code: str, state: str, session_state: str) -> Optional[Response]:
+def login_return(code: str, state: str, session_state: str = "") -> Optional[Response]:
     """Login_return."""
     state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
     state_redirect_url = state_dict["redirect_url"]
@@ -269,12 +275,13 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response]:
         user_model = AuthorizationService.create_user_from_sign_in(user_info)
         g.user = user_model.id
         g.token = auth_token_object["id_token"]
-        AuthenticationService.store_refresh_token(
-            user_model.id, auth_token_object["refresh_token"]
-        )
+        if "refresh_token" in auth_token_object:
+            AuthenticationService.store_refresh_token(
+                user_model.id, auth_token_object["refresh_token"]
+            )
         redirect_url = state_redirect_url
         tld = current_app.config["THREAD_LOCAL_DATA"]
-        tld.new_access_token = auth_token_object["access_token"]
+        tld.new_access_token = auth_token_object["id_token"]
         tld.new_id_token = auth_token_object["id_token"]
         return redirect(redirect_url)

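Not part of the diff: a sketch of why the cookie fallback above exists. A plain link in rendered markdown cannot attach an Authorization header, so the process-data-file-download route reads the access_token cookie the browser already holds. The host, identifiers, index, and token value below are hypothetical; the URL shape matches the one built by the markdown download-link script introduced later in this changeset.

    import requests

    backend = "http://localhost:7000/v1.0"  # hypothetical backend URL
    # hypothetical model, instance, and process data identifiers
    url = f"{backend}/process-data-file-download/my-group:my-model/42/my_file?index=0"

    # mirror what a browser does when following the markdown link: send the cookie, not a header
    response = requests.get(url, cookies={"access_token": "some-token"})
    response.raise_for_status()
    with open("my_file.pdf", "wb") as f:
        f.write(response.content)
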
@@ -0,0 +1,53 @@
+"""Markdown_file_download_link."""
+from typing import Any
+from urllib.parse import unquote
+
+from flask import current_app
+
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.script_attributes_context import (
+    ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+
+
+class GetMarkdownFileDownloadLink(Script):
+    """GetMarkdownFileDownloadLink."""
+
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
+    def get_description(self) -> str:
+        """Get_description."""
+        return """Returns a string which is a string in markdown format."""
+
+    def run(
+        self,
+        script_attributes_context: ScriptAttributesContext,
+        *_args: Any,
+        **kwargs: Any,
+    ) -> Any:
+        """Run."""
+        # example input:
+        #  "data:application/pdf;name=Harmeet_1234.pdf;base64,JV...."
+        process_data_identifier = kwargs["key"]
+        parts = kwargs["file_data"].split(";")
+        file_index = kwargs["file_index"]
+        label = unquote(parts[1].split("=")[1])
+        process_model_identifier = script_attributes_context.process_model_identifier
+        modified_process_model_identifier = (
+            ProcessModelInfo.modify_process_identifier_for_path_param(
+                process_model_identifier
+            )
+        )
+        process_instance_id = script_attributes_context.process_instance_id
+        url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
+        url += (
+            f"/v1.0/process-data-file-download/{modified_process_model_identifier}/"
+            + f"{process_instance_id}/{process_data_identifier}?index={file_index}"
+        )
+        link = f"[{label}]({url})"
+
+        return link

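Not part of the diff: a sketch of how this script might be invoked from a BPMN script task. It assumes the backend exposes Script subclasses to script tasks under a snake_case name derived from the module, which would supply the kwargs read in run() above; the process data values are hypothetical.

    # inside a BPMN script task (hypothetical process data); `my_file` holds a
    # "data:<mimetype>;name=<filename>;base64,<payload>" string like the example input above
    markdown_link = get_markdown_file_download_link(
        key="my_file",
        file_data=my_file,
        file_index=0,
    )
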
@@ -11,7 +11,6 @@ from flask import current_app
 from flask import redirect
 from werkzeug.wrappers import Response

-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel

@@ -20,7 +19,21 @@ class MissingAccessTokenError(Exception):
     """MissingAccessTokenError."""


+class NotAuthorizedError(Exception):
+    pass
+
+
+class RefreshTokenStorageError(Exception):
+    pass
+
+
+class UserNotLoggedInError(Exception):
+    pass
+
+
 # These could be either 'id' OR 'access' tokens and we can't always know which


 class TokenExpiredError(Exception):
     """TokenExpiredError."""

@@ -29,6 +42,10 @@ class TokenInvalidError(Exception):
     """TokenInvalidError."""


+class TokenNotProvidedError(Exception):
+    pass
+
+
 class AuthenticationProviderTypes(enum.Enum):
     """AuthenticationServiceProviders."""

@@ -183,9 +200,8 @@ class AuthenticationService:
             db.session.commit()
         except Exception as e:
             db.session.rollback()
-            raise ApiError(
-                error_code="store_refresh_token_error",
-                message=f"We could not store the refresh token. Original error is {e}",
+            raise RefreshTokenStorageError(
+                f"We could not store the refresh token. Original error is {e}",
             ) from e

     @staticmethod

@@ -21,7 +21,6 @@ from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from sqlalchemy import or_
 from sqlalchemy import text

-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.group import GroupModel
@@ -34,6 +33,11 @@ from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.models.user import UserNotFoundError
 from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
 from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint
+from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
+from spiffworkflow_backend.services.authentication_service import TokenExpiredError
+from spiffworkflow_backend.services.authentication_service import TokenInvalidError
+from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
+from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
 from spiffworkflow_backend.services.group_service import GroupService
 from spiffworkflow_backend.services.user_service import UserService

@@ -98,20 +102,16 @@ class AuthorizationService:
     def verify_sha256_token(cls, auth_header: Optional[str]) -> None:
         """Verify_sha256_token."""
         if auth_header is None:
-            raise ApiError(
-                error_code="unauthorized",
-                message="",
-                status_code=403,
+            raise TokenNotProvidedError(
+                "unauthorized",
             )

         received_sign = auth_header.split("sha256=")[-1].strip()
         secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode()
         expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest()
         if not compare_digest(received_sign, expected_sign):
-            raise ApiError(
-                error_code="unauthorized",
-                message="",
-                status_code=403,
+            raise TokenInvalidError(
+                "unauthorized",
             )

     @classmethod
@@ -393,10 +393,8 @@ class AuthorizationService:
         authorization_exclusion_list = ["permissions_check"]

         if not hasattr(g, "user"):
-            raise ApiError(
-                error_code="user_not_logged_in",
-                message="User is not logged in. Please log in",
-                status_code=401,
+            raise UserNotLoggedInError(
+                "User is not logged in. Please log in",
             )

         api_view_function = current_app.view_functions[request.endpoint]
@@ -416,13 +414,11 @@ class AuthorizationService:
         if has_permission:
             return None

-        raise ApiError(
-            error_code="unauthorized",
-            message=(
+        raise NotAuthorizedError(
+            (
                 f"User {g.user.username} is not authorized to perform requested action:"
                 f" {permission_string} - {request.path}"
             ),
-            status_code=403,
         )

     @staticmethod
@@ -440,13 +436,11 @@ class AuthorizationService:
             payload = jwt.decode(auth_token, options={"verify_signature": False})
             return payload
         except jwt.ExpiredSignatureError as exception:
-            raise ApiError(
-                "token_expired",
+            raise TokenExpiredError(
                 "The Authentication token you provided expired and must be renewed.",
             ) from exception
         except jwt.InvalidTokenError as exception:
-            raise ApiError(
-                "token_invalid",
+            raise TokenInvalidError(
                 (
                     "The Authentication token you provided is invalid. You need a new"
                     " token. "
@@ -551,7 +545,9 @@ class AuthorizationService:

         permissions_to_assign: list[PermissionToAssign] = []

-        # we were thinking that if you can start an instance, you ought to be able to view your own instances.
+        # we were thinking that if you can start an instance, you ought to be able to:
+        # 1. view your own instances.
+        # 2. view the logs for these instances.
         if permission_set == "start":
             target_uri = f"/process-instances/{process_related_path_segment}"
             permissions_to_assign.append(
@@ -561,6 +557,10 @@ class AuthorizationService:
             permissions_to_assign.append(
                 PermissionToAssign(permission="read", target_uri=target_uri)
             )
+            target_uri = f"/logs/{process_related_path_segment}"
+            permissions_to_assign.append(
+                PermissionToAssign(permission="read", target_uri=target_uri)
+            )

         else:
             if permission_set == "all":

@@ -240,5 +240,8 @@ class DBHandler(logging.Handler):
                     "spiff_step": spiff_step,
                 }
             )
-            if len(self.logs) % 1 == 0:
+            # so at some point we are going to insert logs.
+            # we don't want to insert on every log, so we will insert every 100 logs, which is just about as fast as inserting
+            # on every 1,000 logs. if we get deadlocks in the database, this can be changed to 1 in order to insert on every log.
+            if len(self.logs) % 100 == 0:
                 self.bulk_insert_logs()

@@ -26,8 +26,13 @@ from lxml import etree  # type: ignore
 from lxml.etree import XMLSyntaxError  # type: ignore
 from RestrictedPython import safe_globals  # type: ignore
 from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
-from SpiffWorkflow.bpmn.PythonScriptEngine import Box  # type: ignore
-from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine  # type: ignore
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment  # type: ignore
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment
+from SpiffWorkflow.bpmn.serializer.task_spec import (  # type: ignore
+    EventBasedGatewayConverter,
+)
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec  # type: ignore
 from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent  # type: ignore
@@ -36,37 +41,12 @@ from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent  # type: ignore
 from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
-from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter  # type: ignore
+from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter  # type: ignore
 from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
-from SpiffWorkflow.exceptions import WorkflowException
+from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
-from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter  # type: ignore
-from SpiffWorkflow.spiff.serializer.task_spec_converters import (
-    CallActivityTaskConverter,
-)
-from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import (
-    EventBasedGatewayConverter,
-)
-from SpiffWorkflow.spiff.serializer.task_spec_converters import (
-    IntermediateCatchEventConverter,
-)
-from SpiffWorkflow.spiff.serializer.task_spec_converters import (
-    IntermediateThrowEventConverter,
-)
-from SpiffWorkflow.spiff.serializer.task_spec_converters import ManualTaskConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import ReceiveTaskConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import ScriptTaskConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import SendTaskConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import ServiceTaskConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import StartEventConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import SubWorkflowTaskConverter
-from SpiffWorkflow.spiff.serializer.task_spec_converters import (
-    TransactionSubprocessConverter,
-)
-from SpiffWorkflow.spiff.serializer.task_spec_converters import UserTaskConverter
+from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
@@ -109,6 +89,8 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
 from spiffworkflow_backend.services.user_service import UserService

+SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter)
+
+
 # Sorry about all this crap. I wanted to move this thing to another file, but
 # importing a bunch of types causes circular imports.

@@ -151,6 +133,132 @@ class ProcessInstanceLockedBySomethingElseError(Exception):
     pass


+class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment):  # type: ignore
+    def __init__(self, environment_globals: Dict[str, Any]):
+        """BoxedTaskDataBasedScriptEngineEnvironment."""
+        self._last_result: Dict[str, Any] = {}
+        super().__init__(environment_globals)
+
+    def execute(
+        self,
+        script: str,
+        context: Dict[str, Any],
+        external_methods: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        super().execute(script, context, external_methods)
+        self._last_result = context
+
+    def last_result(self) -> Dict[str, Any]:
+        return self._last_result
+
+    def clear_state(self) -> None:
+        pass
+
+    def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        pass
+
+    def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        pass
+
+    def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        pass
+
+    def revise_state_with_task_data(self, task: SpiffTask) -> None:
+        pass
+
+
+class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment):  # type: ignore
+    PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state"
+
+    def __init__(self, environment_globals: Dict[str, Any]):
+        """NonTaskDataBasedScriptEngineEnvironment."""
+        self.state: Dict[str, Any] = {}
+        self.non_user_defined_keys = set(
+            [*environment_globals.keys()] + ["__builtins__", "current_user"]
+        )
+        super().__init__(environment_globals)
+
+    def evaluate(
+        self,
+        expression: str,
+        context: Dict[str, Any],
+        external_methods: Optional[dict[str, Any]] = None,
+    ) -> Any:
+        # TODO: once integrated look at the tests that fail without Box
+        Box.convert_to_box(context)
+        state = {}
+        state.update(self.globals)
+        state.update(external_methods or {})
+        state.update(self.state)
+        state.update(context)
+        return eval(expression, state)  # noqa
+
+    def execute(
+        self,
+        script: str,
+        context: Dict[str, Any],
+        external_methods: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        # TODO: once integrated look at the tests that fail without Box
+        Box.convert_to_box(context)
+        self.state.update(self.globals)
+        self.state.update(external_methods or {})
+        self.state.update(context)
+        exec(script, self.state)  # noqa
+
+        self.state = self._user_defined_state(external_methods)
+
+        # the task data needs to be updated with the current state so data references can be resolved properly.
+        # the state will be removed later once the task is completed.
+        context.update(self.state)
+
+    def _user_defined_state(
+        self, external_methods: Optional[Dict[str, Any]] = None
+    ) -> Dict[str, Any]:
+        keys_to_filter = self.non_user_defined_keys
+        if external_methods is not None:
+            keys_to_filter |= set(external_methods.keys())
+
+        return {
+            k: v
+            for k, v in self.state.items()
+            if k not in keys_to_filter and not callable(v)
+        }
+
+    def last_result(self) -> Dict[str, Any]:
+        return self.state
+
+    def clear_state(self) -> None:
+        self.state = {}
+
+    def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        key = self.PYTHON_ENVIRONMENT_STATE_KEY
+        state = self._user_defined_state()
+        bpmn_process_instance.data[key] = state
+
+    def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        key = self.PYTHON_ENVIRONMENT_STATE_KEY
+        self.state = bpmn_process_instance.data.get(key, {})
+
+    def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        bpmn_process_instance.data.update(self._user_defined_state())
+
+    def revise_state_with_task_data(self, task: SpiffTask) -> None:
+        state_keys = set(self.state.keys())
+        task_data_keys = set(task.data.keys())
+        state_keys_to_remove = state_keys - task_data_keys
+        task_data_keys_to_keep = task_data_keys - state_keys
+
+        self.state = {
+            k: v for k, v in self.state.items() if k not in state_keys_to_remove
+        }
+        task.data = {k: v for k, v in task.data.items() if k in task_data_keys_to_keep}
+
+
+class CustomScriptEngineEnvironment(BoxedTaskDataBasedScriptEngineEnvironment):
+    pass
+
+
 class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
     """This is a custom script processor that can be easily injected into Spiff Workflow.

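Not part of the diff: a small worked example, with hypothetical keys, of the set arithmetic in NonTaskDataBasedScriptEngineEnvironment.revise_state_with_task_data above. It shows how keys are split so the python environment state and the spiff task data stop duplicating each other.

    state = {"x": 1, "shared": 2}      # keys held by the script engine environment
    task_data = {"shared": 2, "y": 3}  # keys currently on the spiff task

    state_keys_to_remove = set(state) - set(task_data)    # {"x"}
    task_data_keys_to_keep = set(task_data) - set(state)  # {"y"}

    # mirror the revision: the environment drops "x", the task keeps only "y"
    state = {k: v for k, v in state.items() if k not in state_keys_to_remove}
    task_data = {k: v for k, v in task_data.items() if k in task_data_keys_to_keep}
    assert state == {"shared": 2}
    assert task_data == {"y": 3}
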
@@ -180,7 +288,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
         default_globals.update(safe_globals)
         default_globals["__builtins__"]["__import__"] = _import

-        super().__init__(default_globals=default_globals)
+        environment = CustomScriptEngineEnvironment(default_globals)
+
+        super().__init__(environment=environment)

     def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]:
         """__get_augment_methods."""
@@ -279,29 +389,12 @@ class ProcessInstanceProcessor:

     _script_engine = CustomBpmnScriptEngine()
     SERIALIZER_VERSION = "1.0-spiffworkflow-backend"

     wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
-        [
-            BoundaryEventConverter,
-            BusinessRuleTaskConverter,
-            CallActivityTaskConverter,
-            EndEventConverter,
-            IntermediateCatchEventConverter,
-            IntermediateThrowEventConverter,
-            EventBasedGatewayConverter,
-            ManualTaskConverter,
-            NoneTaskConverter,
-            ReceiveTaskConverter,
-            ScriptTaskConverter,
-            SendTaskConverter,
-            ServiceTaskConverter,
-            StartEventConverter,
-            SubWorkflowTaskConverter,
-            TransactionSubprocessConverter,
-            UserTaskConverter,
-        ]
+        SPIFF_SPEC_CONFIG
     )
     _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
-    _event_serializer = EventBasedGatewayConverter()
+    _event_serializer = EventBasedGatewayConverter(wf_spec_converter)

     PROCESS_INSTANCE_ID_KEY = "process_instance_id"
     VALIDATION_PROCESS_KEY = "validate_only"
@@ -393,7 +486,7 @@ class ProcessInstanceProcessor:
                 validate_only,
                 subprocesses=subprocesses,
             )
-            self.bpmn_process_instance.script_engine = self._script_engine
+            self.set_script_engine(self.bpmn_process_instance)
             self.add_user_info_to_process_instance(self.bpmn_process_instance)

         except MissingSpecError as ke:
@@ -439,6 +532,18 @@ class ProcessInstanceProcessor:
             bpmn_process_spec, subprocesses
         )

+    @staticmethod
+    def set_script_engine(bpmn_process_instance: BpmnWorkflow) -> None:
+        ProcessInstanceProcessor._script_engine.environment.restore_state(
+            bpmn_process_instance
+        )
+        bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine
+
+    def preserve_script_engine_state(self) -> None:
+        ProcessInstanceProcessor._script_engine.environment.preserve_state(
+            self.bpmn_process_instance
+        )
+
     def current_user(self) -> Any:
         """Current_user."""
         current_user = None
@@ -471,11 +576,12 @@ class ProcessInstanceProcessor:
         subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
     ) -> BpmnWorkflow:
         """Get_bpmn_process_instance_from_workflow_spec."""
-        return BpmnWorkflow(
+        bpmn_process_instance = BpmnWorkflow(
             spec,
-            script_engine=ProcessInstanceProcessor._script_engine,
             subprocess_specs=subprocesses,
         )
+        ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
+        return bpmn_process_instance

     @staticmethod
     def __get_bpmn_process_instance(
@@ -502,9 +608,7 @@ class ProcessInstanceProcessor:
             finally:
                 spiff_logger.setLevel(original_spiff_logger_log_level)

-            bpmn_process_instance.script_engine = (
-                ProcessInstanceProcessor._script_engine
-            )
+            ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
         else:
             bpmn_process_instance = (
                 ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(
@@ -868,7 +972,7 @@ class ProcessInstanceProcessor:
     def send_bpmn_event(self, event_data: dict[str, Any]) -> None:
         """Send an event to the workflow."""
         payload = event_data.pop("payload", None)
-        event_definition = self._event_serializer.restore(event_data)
+        event_definition = self._event_serializer.registry.restore(event_data)
         if payload is not None:
             event_definition.payload = payload
         current_app.logger.info(
@@ -1385,25 +1489,25 @@ class ProcessInstanceProcessor:
     def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
         """Do_engine_steps."""
         step_details = []

+        def did_complete_task(task: SpiffTask) -> None:
+            self._script_engine.environment.revise_state_with_task_data(task)
+            step_details.append(self.spiff_step_details_mapping())
+
         try:
-            self.bpmn_process_instance.refresh_waiting_tasks(
-                #
-                # commenting out to see if this helps with the growing spiff steps/db issue
-                #
-                # will_refresh_task=lambda t: self.increment_spiff_step(),
-                # did_refresh_task=lambda t: step_details.append(
-                #     self.spiff_step_details_mapping()
-                # ),
-            )
+            self.bpmn_process_instance.refresh_waiting_tasks()

             self.bpmn_process_instance.do_engine_steps(
                 exit_at=exit_at,
                 will_complete_task=lambda t: self.increment_spiff_step(),
-                did_complete_task=lambda t: step_details.append(
-                    self.spiff_step_details_mapping()
-                ),
+                did_complete_task=did_complete_task,
             )

+            if self.bpmn_process_instance.is_completed():
+                self._script_engine.environment.finalize_result(
+                    self.bpmn_process_instance
+                )
+
             self.process_bpmn_messages()
             self.queue_waiting_receive_messages()

@@ -1466,6 +1570,7 @@ class ProcessInstanceProcessor:
     def serialize(self) -> str:
         """Serialize."""
         self.check_task_data_size()
+        self.preserve_script_engine_state()
         return self._serializer.serialize_json(self.bpmn_process_instance)  # type: ignore

     def next_user_tasks(self) -> list[SpiffTask]:

@@ -4,6 +4,7 @@ from typing import Any
 from typing import List
 from typing import Optional

+import sentry_sdk
 from flask import current_app
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

@@ -234,8 +235,9 @@ class ProcessInstanceService:
         # ProcessInstanceService.post_process_form(spiff_task)  # some properties may update the data store.
         processor.complete_task(spiff_task, human_task, user=user)

-        # maybe move this out once we have the interstitial page since this is here just so we can get the next human task
-        processor.do_engine_steps(save=True)
+        with sentry_sdk.start_span(op="task", description="backend_do_engine_steps"):
+            # maybe move this out once we have the interstitial page since this is here just so we can get the next human task
+            processor.do_engine_steps(save=True)

     @staticmethod
     def extract_form_data(latest_data: dict, task: SpiffTask) -> dict:

@@ -45,6 +45,7 @@ class ScriptUnitTestRunner:
         context = input_context.copy()

         try:
+            cls._script_engine.environment.clear_state()
             cls._script_engine._execute(context=context, script=script)
         except SyntaxError as ex:
             return ScriptUnitTestResult(
@@ -77,6 +78,7 @@ class ScriptUnitTestRunner:
                 error=f"Failed to execute script: {error_message}",
             )

+        context = cls._script_engine.environment.last_result()
         result_as_boolean = context == expected_output_context

         script_unit_test_result = ScriptUnitTestResult(

@@ -3,6 +3,7 @@ import json
 from typing import Any

 import requests
+import sentry_sdk
 from flask import current_app
 from flask import g

@@ -45,27 +46,27 @@ class ServiceTaskDelegate:
     @staticmethod
     def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str:
         """Calls a connector via the configured proxy."""
-        params = {
-            k: ServiceTaskDelegate.check_prefixes(v["value"])
-            for k, v in bpmn_params.items()
-        }
-        params["spiff__task_data"] = task_data
+        call_url = f"{connector_proxy_url()}/v1/do/{name}"
+        with sentry_sdk.start_span(op="call-connector", description=call_url):
+            params = {
+                k: ServiceTaskDelegate.check_prefixes(v["value"])
+                for k, v in bpmn_params.items()
+            }
+            params["spiff__task_data"] = task_data

-        proxied_response = requests.post(
-            f"{connector_proxy_url()}/v1/do/{name}", json=params
-        )
+            proxied_response = requests.post(call_url, json=params)

         parsed_response = json.loads(proxied_response.text)

         if "refreshed_token_set" not in parsed_response:
             return proxied_response.text

         secret_key = parsed_response["auth"]
         refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"])
         user_id = g.user.id if UserService.has_user() else None
         SecretService().update_secret(secret_key, refreshed_token_set, user_id)

         return json.dumps(parsed_response["api_response"])


 class ServiceTaskService:

@@ -41,6 +41,11 @@ class TestGetAllPermissions(BaseTest):
         )

         expected_permissions = [
+            {
+                "group_identifier": "my_test_group",
+                "uri": "/logs/hey:group:*",
+                "permissions": ["read"],
+            },
             {
                 "group_identifier": "my_test_group",
                 "uri": "/process-instances/hey:group:*",

@@ -87,7 +87,8 @@ class TestGetLocaltime(BaseTest):
         )

         assert spiff_task
-        data = spiff_task.data
+
+        data = ProcessInstanceProcessor._script_engine.environment.last_result()
         some_time = data["some_time"]
         localtime = data["localtime"]
         timezone = data["timezone"]

@@ -197,6 +197,10 @@ class TestAuthorizationService(BaseTest):
     ) -> None:
         """Test_explode_permissions_start_on_process_group."""
         expected_permissions = [
+            (
+                "/logs/some-process-group:some-process-model:*",
+                "read",
+            ),
             (
                 "/process-instances/for-me/some-process-group:some-process-model:*",
                 "read",
@@ -255,6 +259,10 @@ class TestAuthorizationService(BaseTest):
     ) -> None:
         """Test_explode_permissions_start_on_process_model."""
         expected_permissions = [
+            (
+                "/logs/some-process-group:some-process-model/*",
+                "read",
+            ),
             (
                 "/process-instances/for-me/some-process-group:some-process-model/*",
                 "read",

@@ -0,0 +1 @@
+/node_modules

@@ -1,5 +1,5 @@
-### STAGE 1: Build ###
-FROM quay.io/sartography/node:latest
+# Base image to share ENV vars that activate VENV.
+FROM quay.io/sartography/node:latest AS base

 RUN mkdir /app
 WORKDIR /app
@@ -7,8 +7,16 @@ WORKDIR /app
 # this matches total memory on spiffworkflow-demo
 ENV NODE_OPTIONS=--max_old_space_size=2048

-ADD package.json /app/
-ADD package-lock.json /app/
+# Setup image for installing JS dependencies.
+FROM base AS setup
+
+COPY . /app/
+
+RUN cp /app/package.json /app/package.json.bak
+ADD justservewebserver.package.json /app/package.json
+RUN npm ci --ignore-scripts
+RUN cp -r /app/node_modules /app/node_modules.justserve
+RUN cp /app/package.json.bak /app/package.json

 # npm ci because it respects the lock file.
 # --ignore-scripts because authors can do bad things in postinstall scripts.
@@ -16,8 +24,19 @@ ADD package-lock.json /app/
 # npx can-i-ignore-scripts can check that it's safe to ignore scripts.
 RUN npm ci --ignore-scripts

-COPY . /app/
-
 RUN npm run build

+# Final image without setup dependencies.
+FROM base AS final
+
+LABEL source="https://github.com/sartography/spiff-arena"
+LABEL description="Software development platform for building, running, and monitoring executable diagrams"
+
+# WARNING: On localhost frontend assumes backend is one port lower.
+ENV PORT0=7001
+
+COPY --from=setup /app/build /app/build
+COPY --from=setup /app/bin /app/bin
+COPY --from=setup /app/node_modules.justserve /app/node_modules
+
 ENTRYPOINT ["/app/bin/boot_server_in_docker"]

@@ -0,0 +1,36 @@
+{
+  "name": "spiffworkflow-frontend",
+  "version": "0.1.0",
+  "private": true,
+  "dependencies": {
+    "serve": "^14.0.0"
+  },
+  "scripts": {
+    "start": "ESLINT_NO_DEV_ERRORS=true PORT=7001 craco start",
+    "build": "craco build",
+    "test": "react-scripts test --coverage",
+    "t": "npm test -- --watchAll=false",
+    "eject": "craco eject",
+    "format": "prettier --write src/**/*.[tj]s{,x}",
+    "lint": "./node_modules/.bin/eslint src",
+    "lint:fix": "./node_modules/.bin/eslint --fix src"
+  },
+  "eslintConfig": {
+    "extends": [
+      "react-app",
+      "react-app/jest"
+    ]
+  },
+  "browserslist": {
+    "production": [
+      ">0.2%",
+      "not dead",
+      "not op_mini all"
+    ],
+    "development": [
+      "last 1 chrome version",
+      "last 1 firefox version",
+      "last 1 safari version"
+    ]
+  }
+}

@@ -35,7 +35,7 @@ export default function ProcessGroupForm({
   };

   const hasValidIdentifier = (identifierToCheck: string) => {
-    return identifierToCheck.match(/^[a-z0-9][0-9a-z-]+[a-z0-9]$/);
+    return identifierToCheck.match(/^[a-z0-9][0-9a-z-]*[a-z0-9]$/);
   };

   const handleFormSubmission = (event: any) => {

@@ -217,7 +217,7 @@ export default function TaskShow() {

   return (
     <Grid fullWidth condensed>
-      <Column md={5} lg={8} sm={4}>
+      <Column sm={4} md={5} lg={8}>
         <Form
           formData={taskData}
           onSubmit={handleFormSubmit}

@@ -1,17 +1,36 @@
 import React from 'react';
-import AddIcon from '@mui/icons-material/Add';
-import IconButton from '@mui/material/IconButton';
-import { IconButtonProps } from '@rjsf/utils';
+import {
+  FormContextType,
+  IconButtonProps,
+  RJSFSchema,
+  StrictRJSFSchema,
+} from '@rjsf/utils';

-const AddButton: React.ComponentType<IconButtonProps> = ({
-  uiSchema,
-  ...props
-}) => {
+// @ts-ignore
+import { AddAlt } from '@carbon/icons-react';
+
+import IconButton from '../IconButton/IconButton';
+
+/** The `AddButton` renders a button that represent the `Add` action on a form
+ */
+export default function AddButton<
+  T = any,
+  S extends StrictRJSFSchema = RJSFSchema,
+  F extends FormContextType = any
+>({ className, onClick, disabled, registry }: IconButtonProps<T, S, F>) {
   return (
-    <IconButton title="Add Item" {...props} color="primary">
-      <AddIcon />
-    </IconButton>
+    <div className="row">
+      <p className={`col-xs-3 col-xs-offset-9 text-right ${className}`}>
+        <IconButton
+          iconType="info"
+          icon="plus"
+          className="btn-add col-xs-12"
+          title="Add"
+          onClick={onClick}
+          disabled={disabled}
+          registry={registry}
+        />
+      </p>
+    </div>
   );
-};
-
-export default AddButton;
+}

@@ -5,6 +5,11 @@ import {
   RJSFSchema,
   StrictRJSFSchema,
 } from '@rjsf/utils';
+import {
+  Grid,
+  Column,
+  // @ts-ignore
+} from '@carbon/react';

 /** The `ArrayFieldItemTemplate` component is the template used to render an items of an array.
  *
@@ -33,53 +38,57 @@ export default function ArrayFieldItemTemplate<
   const { MoveDownButton, MoveUpButton, RemoveButton } =
     registry.templates.ButtonTemplates;
   const btnStyle: CSSProperties = {
-    flex: 1,
-    paddingLeft: 6,
-    paddingRight: 6,
-    fontWeight: 'bold',
+    marginBottom: '0.5em',
   };
+  const mainColumnWidthSmall = 3;
+  const mainColumnWidthMedium = 4;
+  const mainColumnWidthLarge = 7;
   return (
     <div className={className}>
-      <div className={hasToolbar ? 'col-xs-9' : 'col-xs-12'}>{children}</div>
-      {hasToolbar && (
-        <div className="col-xs-3 array-item-toolbox">
-          <div
-            className="btn-group"
-            style={{
-              display: 'flex',
-              justifyContent: 'space-around',
-            }}
-          >
-            {(hasMoveUp || hasMoveDown) && (
-              <MoveUpButton
-                style={btnStyle}
-                disabled={disabled || readonly || !hasMoveUp}
-                onClick={onReorderClick(index, index - 1)}
-                uiSchema={uiSchema}
-                registry={registry}
-              />
-            )}
-            {(hasMoveUp || hasMoveDown) && (
-              <MoveDownButton
-                style={btnStyle}
-                disabled={disabled || readonly || !hasMoveDown}
-                onClick={onReorderClick(index, index + 1)}
-                uiSchema={uiSchema}
-                registry={registry}
-              />
-            )}
-            {hasRemove && (
-              <RemoveButton
-                style={btnStyle}
-                disabled={disabled || readonly}
-                onClick={onDropIndexClick(index)}
-                uiSchema={uiSchema}
-                registry={registry}
-              />
-            )}
-          </div>
-        </div>
-      )}
+      <Grid condensed fullWidth>
+        <Column
+          sm={mainColumnWidthSmall}
+          md={mainColumnWidthMedium}
+          lg={mainColumnWidthLarge}
+        >
+          {children}
+        </Column>
+        {hasToolbar && (
+          <Column sm={1} md={1} lg={1}>
+            <div className="array-item-toolbox">
+              <div className="NOT-btn-group">
+                {(hasMoveUp || hasMoveDown) && (
+                  <MoveUpButton
+                    style={btnStyle}
+                    disabled={disabled || readonly || !hasMoveUp}
+                    onClick={onReorderClick(index, index - 1)}
+                    uiSchema={uiSchema}
+                    registry={registry}
+                  />
+                )}
+                {(hasMoveUp || hasMoveDown) && (
+                  <MoveDownButton
+                    style={btnStyle}
+                    disabled={disabled || readonly || !hasMoveDown}
+                    onClick={onReorderClick(index, index + 1)}
+                    uiSchema={uiSchema}
+                    registry={registry}
+                  />
+                )}
+                {hasRemove && (
+                  <RemoveButton
+                    style={btnStyle}
+                    disabled={disabled || readonly}
+                    onClick={onDropIndexClick(index)}
+                    uiSchema={uiSchema}
+                    registry={registry}
+                  />
+                )}
+              </div>
+            </div>
+          </Column>
+        )}
+      </Grid>
     </div>
   );
 }

@@ -85,6 +85,11 @@ export default function BaseInputTemplate<
     labelToUse = `${labelToUse}*`;
   }

+  let helperText = null;
+  if (uiSchema && uiSchema['ui:help']) {
+    helperText = uiSchema['ui:help'];
+  }
+
   let invalid = false;
   let errorMessageForField = null;
   if (rawErrors && rawErrors.length > 0) {
@@ -102,7 +107,7 @@ export default function BaseInputTemplate<
       id={id}
       name={id}
       className="input"
-      labelText={labelToUse}
+      helperText={helperText}
       invalid={invalid}
       invalidText={errorMessageForField}
       autoFocus={autofocus}
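For context, `ui:help` is the standard rjsf uiSchema hint; with this hunk BaseInputTemplate forwards it to the Carbon input's `helperText` prop. A minimal sketch, assuming a hypothetical `favoriteColor` field (the names are illustrative only):

// Hypothetical schema/uiSchema pair showing where helperText now comes from.
const schema = {
  type: 'object',
  properties: { favoriteColor: { type: 'string', title: 'Favorite color' } },
};
const uiSchema = {
  favoriteColor: { 'ui:help': 'Pick any CSS color name.' },
};
// BaseInputTemplate reads uiSchema['ui:help'] and passes it along as helperText,
// so the hint renders under the input instead of through FieldHelpTemplate.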
@@ -7,10 +7,8 @@ import FormHelperText from '@mui/material/FormHelperText';
  * @param props - The `FieldHelpProps` to be rendered
  */
 export default function FieldHelpTemplate(props: FieldHelpProps) {
-  const { idSchema, help } = props;
-  if (!help) {
-    return null;
-  }
-  const id = `${idSchema.$id}__help`;
-  return <FormHelperText id={id}>{help}</FormHelperText>;
+  // ui:help is handled by helperText in all carbon widgets.
+  // see BaseInputTemplate/BaseInputTemplate.tsx and
+  // SelectWidget/SelectWidget.tsx
+  return null;
 }
@@ -1,64 +1,57 @@
 import React from 'react';
-import FormControl from '@mui/material/FormControl';
-import Typography from '@mui/material/Typography';
-import { FieldTemplateProps, getTemplate, getUiOptions } from '@rjsf/utils';
+import {
+  FieldTemplateProps,
+  FormContextType,
+  RJSFSchema,
+  StrictRJSFSchema,
+  getTemplate,
+  getUiOptions,
+} from '@rjsf/utils';

-function FieldTemplate({
-  id,
-  children,
-  classNames,
-  disabled,
-  displayLabel,
-  hidden,
-  label,
-  onDropPropertyClick,
-  onKeyChange,
-  readonly,
-  required,
-  rawErrors = [],
-  errors,
-  help,
-  rawDescription,
-  schema,
-  uiSchema,
-  registry,
-}: FieldTemplateProps) {
-  const uiOptions = getUiOptions(uiSchema);
-  const WrapIfAdditionalTemplate = getTemplate<'WrapIfAdditionalTemplate'>(
-    'WrapIfAdditionalTemplate',
-    registry,
-    uiOptions
-  );
+import Label from './Label';
+
+/** The `FieldTemplate` component is the template used by `SchemaField` to render any field. It renders the field
+ * content, (label, description, children, errors and help) inside of a `WrapIfAdditional` component.
+ *
+ * @param props - The `FieldTemplateProps` for this component
+ */
+export default function FieldTemplate<
+  T = any,
+  S extends StrictRJSFSchema = RJSFSchema,
+  F extends FormContextType = any
+>(props: FieldTemplateProps<T, S, F>) {
+  const {
+    id,
+    label,
+    children,
+    errors,
+    help,
+    description,
+    hidden,
+    required,
+    displayLabel,
+    registry,
+    uiSchema,
+  } = props;
+  const uiOptions = getUiOptions(uiSchema);
+  const WrapIfAdditionalTemplate = getTemplate<
+    'WrapIfAdditionalTemplate',
+    T,
+    S,
+    F
+  >('WrapIfAdditionalTemplate', registry, uiOptions);
   if (hidden) {
-    return <div style={{ display: 'none' }}>{children}</div>;
+    return <div className="hidden">{children}</div>;
   }
   return (
-    <WrapIfAdditionalTemplate
-      classNames={classNames}
-      disabled={disabled}
-      id={id}
-      label={label}
-      onDropPropertyClick={onDropPropertyClick}
-      onKeyChange={onKeyChange}
-      readonly={readonly}
-      required={required}
-      schema={schema}
-      uiSchema={uiSchema}
-      registry={registry}
-    >
-      <FormControl fullWidth error={!!rawErrors.length} required={required}>
-        {children}
-        {displayLabel && rawDescription ? (
-          <Typography variant="caption" color="textSecondary">
-            {rawDescription}
-          </Typography>
-        ) : null}
-        {errors}
-        {help}
-      </FormControl>
-    </WrapIfAdditionalTemplate>
+    <div className="rjsf-field">
+      <WrapIfAdditionalTemplate {...props}>
+        {displayLabel && <Label label={label} required={required} id={id} />}
+        {displayLabel && description ? description : null}
+        {children}
+        {errors}
+        {help}
+      </WrapIfAdditionalTemplate>
+    </div>
   );
 }
-
-export default FieldTemplate;
@@ -1,55 +1,96 @@
 import React from 'react';
-import IconButton, {
-  IconButtonProps as MuiIconButtonProps,
-} from '@mui/material/IconButton';
-import ArrowDownwardIcon from '@mui/icons-material/ArrowDownward';
-import ArrowUpwardIcon from '@mui/icons-material/ArrowUpward';
-import RemoveIcon from '@mui/icons-material/Remove';
-import { IconButtonProps } from '@rjsf/utils';
+import {
+  FormContextType,
+  IconButtonProps,
+  RJSFSchema,
+  StrictRJSFSchema,
+} from '@rjsf/utils';

-export default function MuiIconButton(props: IconButtonProps) {
-  const { icon, color, uiSchema, ...otherProps } = props;
+// @ts-ignore
+import { Add, TrashCan, ArrowUp, ArrowDown } from '@carbon/icons-react';
+
+export default function IconButton<
+  T = any,
+  S extends StrictRJSFSchema = RJSFSchema,
+  F extends FormContextType = any
+>(props: IconButtonProps<T, S, F>) {
+  const {
+    iconType = 'default',
+    icon,
+    className,
+    uiSchema,
+    registry,
+    ...otherProps
+  } = props;
+  // icon string options: plus, remove, arrow-up, arrow-down
+  let carbonIcon = (
+    <p>
+      Add new <Add />
+    </p>
+  );
+  if (icon === 'remove') {
+    carbonIcon = <TrashCan />;
+  }
+  if (icon === 'arrow-up') {
+    carbonIcon = <ArrowUp />;
+  }
+  if (icon === 'arrow-down') {
+    carbonIcon = <ArrowDown />;
+  }
+
   return (
-    <IconButton
-      {...otherProps}
-      size="small"
-      color={color as MuiIconButtonProps['color']}
-    >
-      {icon}
-    </IconButton>
+    <button
+      type="button"
+      className={`btn btn-${iconType} ${className}`}
+      {...otherProps}
+    >
+      {carbonIcon}
+    </button>
   );
 }

-export function MoveDownButton(props: IconButtonProps) {
+export function MoveDownButton<
+  T = any,
+  S extends StrictRJSFSchema = RJSFSchema,
+  F extends FormContextType = any
+>(props: IconButtonProps<T, S, F>) {
   return (
-    <MuiIconButton
+    <IconButton
       title="Move down"
+      className="array-item-move-down"
       {...props}
-      icon={<ArrowDownwardIcon fontSize="small" />}
+      icon="arrow-down"
     />
   );
 }

-export function MoveUpButton(props: IconButtonProps) {
+export function MoveUpButton<
+  T = any,
+  S extends StrictRJSFSchema = RJSFSchema,
+  F extends FormContextType = any
+>(props: IconButtonProps<T, S, F>) {
   return (
-    <MuiIconButton
+    <IconButton
       title="Move up"
+      className="array-item-move-up"
       {...props}
-      icon={<ArrowUpwardIcon fontSize="small" />}
+      icon="arrow-up"
     />
   );
 }

-export function RemoveButton(props: IconButtonProps) {
-  const { iconType, ...otherProps } = props;
+export function RemoveButton<
+  T = any,
+  S extends StrictRJSFSchema = RJSFSchema,
+  F extends FormContextType = any
+>(props: IconButtonProps<T, S, F>) {
   return (
-    <MuiIconButton
+    <IconButton
       title="Remove"
-      {...otherProps}
-      color="error"
-      icon={
-        <RemoveIcon fontSize={iconType === 'default' ? undefined : 'small'} />
-      }
+      className="array-item-remove"
+      {...props}
+      iconType="danger"
+      icon="remove"
     />
   );
 }
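For orientation (hypothetical wiring, not part of this diff): these exports are the buttons rjsf looks up through `registry.templates.ButtonTemplates`, as ArrayFieldItemTemplate above does, so a theme would register them roughly like this:

// Sketch only; the real registration lives elsewhere in the theme package.
import AddButton from '../AddButton/AddButton';
import IconButton, {
  MoveDownButton,
  MoveUpButton,
  RemoveButton,
} from '../IconButton/IconButton';

const ButtonTemplates = { AddButton, MoveDownButton, MoveUpButton, RemoveButton };
// Passed to rjsf via its templates option, which is how ArrayFieldItemTemplate
// ends up rendering MoveUpButton, MoveDownButton, and RemoveButton.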
@@ -32,9 +32,6 @@ const RadioWidget = ({

   return (
     <>
-      <FormLabel required={required} htmlFor={id}>
-        {label || schema.title}
-      </FormLabel>
       <RadioGroup
         id={id}
         name={id}
@@ -41,6 +41,10 @@ function SelectWidget({
   } else if (schema && schema.title) {
     labelToUse = schema.title;
   }
+  let helperText = null;
+  if (uiSchema && uiSchema['ui:help']) {
+    helperText = uiSchema['ui:help'];
+  }
   if (required) {
     labelToUse = `${labelToUse}*`;
   }
@@ -49,16 +53,20 @@
   let errorMessageForField = null;
   if (rawErrors && rawErrors.length > 0) {
     invalid = true;
-    errorMessageForField = `${labelToUse.replace(/\*$/, '')} ${rawErrors[0]}`;
+    // errorMessageForField = `${labelToUse.replace(/\*$/, '')} ${rawErrors[0]}`;
+    errorMessageForField = rawErrors[0];
   }

+  // maybe use placeholder somehow. it was previously jammed into the helperText field,
+  // but allowing ui:help to grab that spot seems much more appropriate.
+
   return (
     <Select
       id={id}
       name={id}
-      labelText={labelToUse}
+      labelText=""
       select
-      helperText={placeholder}
+      helperText={helperText}
       value={typeof value === 'undefined' ? emptyValue : value}
       disabled={disabled || readonly}
       autoFocus={autofocus}
@@ -65,7 +65,7 @@ function TextareaWidget<
   let errorMessageForField = null;
   if (rawErrors && rawErrors.length > 0) {
     invalid = true;
-    errorMessageForField = `${labelToUse.replace(/\*$/, '')} ${rawErrors[0]}`;
+    errorMessageForField = rawErrors[0];
   }

   return (
@@ -74,7 +74,7 @@
       name={id}
       className="form-control"
       value={value || ''}
-      labelText={labelToUse}
+      labelText=""
       placeholder={placeholder}
       required={required}
       disabled={disabled}
@@ -1,7 +1,3 @@
-button.react-json-schema-form-submit-button {
-  margin-top: 1.5em;
-}
-
 .rjsf .header {
   font-weight: 400;
   font-size: 20px;
@@ -17,6 +13,11 @@ button.react-json-schema-form-submit-button {
   margin-bottom: 1em;
 }

-.rjsf .input {
+/* For some reason rjsf wraps the entire form in FieldTemplate.jsx, which is where the rjsf-field class gets added (it is only intended for fields, not whole forms). Hence the doubled rjsf-field selector: the margin only applies to rjsf-fields nested inside rjsf-fields, so we don't get a double margin after the last field. */
+.rjsf .rjsf-field .rjsf-field {
   margin-bottom: 2em;
 }
+
+.array-item-toolbox {
+  margin-left: 2em;
+}
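A rough DOM sketch (assumed shape, simplified) of why the doubled selector is needed: the root object field also renders through FieldTemplate, so its wrapper carries `rjsf-field` too, and only the nested per-field wrappers should pick up the bottom margin.

// Illustrative only – the real markup has more wrappers.
const sketch = (
  <div className="rjsf">
    <div className="rjsf-field">
      {/* whole-form wrapper: matches .rjsf-field only once, so no margin */}
      <div className="rjsf-field">{/* a real field: matched by .rjsf .rjsf-field .rjsf-field */}</div>
      <div className="rjsf-field">{/* last field: gets the margin, with nothing doubled below it */}</div>
    </div>
  </div>
);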