Merge branch 'main' into cullerton
commit d4b31efd5a
@@ -1,5 +1,5 @@
pip==22.1.2
nox==2022.1.7
nox-poetry==1.0.1
-poetry==1.1.13
+poetry==1.1.14
virtualenv==20.15.1
@@ -12,18 +12,27 @@ set -o errtrace -o errexit -o nounset -o pipefail

# originally from https://medium.com/keycloak/keycloak-jwt-token-using-curl-post-72c9e791ba8c
# btw, meta config endpoint: http://localhost:7002/realms/spiffworkflow/.well-known/openid-configuration
# token exchange described at https://github.com/keycloak/keycloak-documentation/blob/main/securing_apps/topics/token-exchange/token-exchange.adoc
# some UMA stuff at https://github.com/keycloak/keycloak-documentation/blob/main/authorization_services/topics/service-authorization-obtaining-permission.adoc,
# though resource_set docs are elsewhere.

# ./bin/get_token # uses ciuser1 ciuser1
# ./bin/get_token ciadmin1 ciadmin1
# ./bin/get_token repeat_form_user_1 repeat_form_user_1 # actually has permissions to the resource in this script
# ./bin/get_token ciadmin1 ciadmin1 '%2Fprocess-models'

HOSTNAME=localhost:7002
REALM_NAME=spiffworkflow
USERNAME=${1-ciuser1}
PASSWORD=${2-ciuser1}
URI_TO_TEST_AGAINST=${3-'%2Fprocess-models%2Fcategory_number_one%2Fprocess-model-with-repeating-form'}

FRONTEND_CLIENT_ID=spiffworkflow-frontend
BACKEND_CLIENT_ID=spiffworkflow-backend
BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" # noqa: S105
SECURE=false

-BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64 -w0)
+BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64)
KEYCLOAK_URL=http://$HOSTNAME/realms/$REALM_NAME/protocol/openid-connect/token

echo "Using Keycloak: $KEYCLOAK_URL"

@@ -63,7 +72,7 @@ if [[ "$backend_token" != 'null' ]]; then
echo "backend_token: $backend_token"

echo "Getting resource set"
-resource_result=$(curl -s "http://localhost:7002/realms/spiffworkflow/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=%2Fprocess-models%2Fcategory_number_one%2Fprocess-model-with-repeating-form" -H "Authorization: Bearer $backend_token")
+resource_result=$(curl -s "http://localhost:7002/realms/spiffworkflow/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=${URI_TO_TEST_AGAINST}" -H "Authorization: Bearer $backend_token")

resource_ids=$(jq -r '.[] | ._id' <<<"$resource_result" || echo '')
if [[ -z "$resource_ids" || "$resource_ids" == "null" ]]; then
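For reference, the variables defined above feed a token request followed by the UMA resource_set lookup shown in the hunk above. A rough Python sketch of that flow, assuming a password grant (the part of the script that actually requests the token falls outside these hunks, so the grant type and parameters here are assumptions):

# Sketch only: assumed token request plus the resource_set lookup mirrored from the curl call above.
import requests

token_response = requests.post(
    "http://localhost:7002/realms/spiffworkflow/protocol/openid-connect/token",
    data={
        "grant_type": "password",  # assumption; the script may use a different grant
        "client_id": "spiffworkflow-backend",
        "client_secret": "JXeQExm0JhQPLumgHtIIqf52bDalHz0q",
        "username": "ciuser1",
        "password": "ciuser1",
    },
)
backend_token = token_response.json()["access_token"]

resource_result = requests.get(
    "http://localhost:7002/realms/spiffworkflow/authz/protection/resource_set",
    params={
        "matchingUri": "true",
        "deep": "true",
        "max": "-1",
        "exactName": "false",
        "uri": "/process-models/category_number_one/process-model-with-repeating-form",
    },
    headers={"Authorization": f"Bearer {backend_token}"},
)
resource_ids = [resource["_id"] for resource in resource_result.json()]
print(resource_ids)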
@@ -32,9 +32,12 @@ def main():
columns_to_data_key_mappings = {
    "Month": "month",
    "MS": "milestone",
    "Done?": "done",
    "#": "notion_id",
    "ID": "req_id",
    "Dev Days": "dev_days",
    "Feature": "feature",
    "Feature description": "feature_description",
    "Priority": "priority",
}
columns_to_header_index_mappings = {}
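columns_to_header_index_mappings starts empty here; presumably the rest of main() fills it by locating each mapped column in the sheet's header row. A hypothetical sketch under that assumption (the header_row value is invented):

# Hypothetical: map each known column name to its position in a header row.
header_row = ["Month", "MS", "Done?", "#", "ID", "Dev Days", "Feature", "Feature description", "Priority"]
columns_to_header_index_mappings = {
    column_name: header_row.index(column_name)
    for column_name in columns_to_data_key_mappings
    if column_name in header_row
}
# e.g. {"Month": 0, "MS": 1, "Done?": 2, ...}, so later rows can be read by index.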
@@ -20,6 +20,8 @@ docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/spiffworkflow-re
+docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/quarkus-realm.json || echo ''
+echo 'ran import finance realm'

docker stop keycloak
docker start keycloak


# to export:
@@ -618,23 +618,18 @@ develop = false
[package.dependencies]
click = "^8.0.1"
flask = "*"
flask-admin = "*"
flask-bcrypt = "*"
flask-cors = "*"
flask-mail = "*"
flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
-sentry-sdk = "1.7.0"
+sentry-sdk = "1.7.1"
sphinx-autoapi = "^1.8.4"
-spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/parse_spiffworkflow_extensions"}
+spiffworkflow = "*"
werkzeug = "*"

[package.source]
type = "git"
url = "https://github.com/sartography/flask-bpmn"
-reference = "main"
-resolved_reference = "c454e729c634c7c86ff30cf4d388480647d18d7b"
+reference = "feature/with-spiff-properties"
+resolved_reference = "c7497aabb039420d9e7c68a50d7a4b3e74100e81"

[[package]]
name = "flask-cors"
@@ -1410,19 +1405,19 @@ pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)", "pyasn1"]

[[package]]
name = "python-keycloak"
-version = "1.8.0"
+version = "1.9.1"
description = "python-keycloak is a Python package providing access to the Keycloak API."
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.7,<4.0"

[package.dependencies]
-python-jose = ">=1.4.0"
-requests = ">=2.20.0"
-urllib3 = ">=1.26.0"
+python-jose = ">=3.3.0,<4.0.0"
+requests = ">=2.20.0,<3.0.0"
+urllib3 = ">=1.26.0,<2.0.0"

[package.extras]
-docs = ["mock", "alabaster", "commonmark", "recommonmark", "sphinx", "sphinx-rtd-theme", "readthedocs-sphinx-ext", "m2r2", "sphinx-autoapi"]
+docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>=0.9.1,<0.10.0)", "m2r2 (>=0.3.2,<0.4.0)", "mock (>=4.0.3,<5.0.0)", "readthedocs-sphinx-ext (>=2.1.8,<3.0.0)", "recommonmark (>=0.7.1,<0.8.0)", "sphinx-autoapi (>=1.8.4,<2.0.0)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)"]

[[package]]
name = "pytz"
@@ -1561,7 +1556,7 @@ requests = "*"

[[package]]
name = "sentry-sdk"
-version = "1.7.0"
+version = "1.7.1"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = false
@@ -1806,8 +1801,8 @@ pytz = "*"
[package.source]
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
-reference = "feature/parse_spiffworkflow_extensions"
-resolved_reference = "67054883d4040d6755bf0555f072ff85aa42093c"
+reference = "feature/spiff_properties"
+resolved_reference = "e108aa12da008bdd8d0319e182d28fbd3afb4c67"

[[package]]
name = "sqlalchemy"
@@ -2098,7 +2093,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
-content-hash = "c20a647c5a3e12bb5e1e9280316566acaa548e6722b7e4e13b610f25877bd4d0"
+content-hash = "66503576ef158089a92526ed6982bc93481868a47fec14c47d1ce9a5bcc08979"

[metadata.files]
alabaster = [
@@ -2284,7 +2279,10 @@ dparse = [
    {file = "dparse-0.5.1-py3-none-any.whl", hash = "sha256:e953a25e44ebb60a5c6efc2add4420c177f1d8404509da88da9729202f306994"},
    {file = "dparse-0.5.1.tar.gz", hash = "sha256:a1b5f169102e1c894f9a7d5ccf6f9402a836a5d24be80a986c7ce9eaed78f367"},
]
-ecdsa = []
+ecdsa = [
+    {file = "ecdsa-0.17.0-py2.py3-none-any.whl", hash = "sha256:5cf31d5b33743abe0dfc28999036c849a69d548f994b535e527ee3cb7f3ef676"},
+    {file = "ecdsa-0.17.0.tar.gz", hash = "sha256:b9f500bb439e4153d0330610f5d26baaf18d17b8ced1bc54410d189385ea68aa"},
+]
filelock = [
    {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"},
    {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"},
@@ -2858,7 +2856,10 @@ python-dateutil = [
    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
-python-jose = []
+python-jose = [
+    {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"},
+    {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"},
+]
python-keycloak = []
pytz = [
    {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"},
@@ -3057,7 +3058,10 @@ sphinx-basic-ng = [
    {file = "sphinx_basic_ng-0.0.1a11-py3-none-any.whl", hash = "sha256:9aecb5345816998789ef76658a83e3c0a12aafa14b17d40e28cd4aaeb94d1517"},
    {file = "sphinx_basic_ng-0.0.1a11.tar.gz", hash = "sha256:bf9a8fda0379c7d2ab51c9543f2b18e014b77fb295b49d64f3c1a910c863b34f"},
]
-sphinx-click = []
+sphinx-click = [
+    {file = "sphinx-click-4.3.0.tar.gz", hash = "sha256:bd4db5d3c1bec345f07af07b8e28a76cfc5006d997984e38ae246bbf8b9a3b38"},
+    {file = "sphinx_click-4.3.0-py3-none-any.whl", hash = "sha256:23e85a3cb0b728a421ea773699f6acadefae171d1a764a51dd8ec5981503ccbe"},
+]
sphinxcontrib-applehelp = [
    {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"},
    {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"},
@@ -27,13 +27,13 @@ flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
-spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/parse_spiffworkflow_extensions"}
-# spiffworkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
-sentry-sdk = "1.7.0"
+spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/spiff_properties"}
+# spiffworkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
+sentry-sdk = "1.7.1"
sphinx-autoapi = "^1.8.4"
-# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
+# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
-flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
+flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "feature/with-spiff-properties"}
mysql-connector-python = "^8.0.29"
pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0"
@@ -46,7 +46,7 @@ marshmallow-sqlalchemy = "^0.28.0"
PyJWT = "^2.4.0"
gunicorn = "^20.1.0"
types-pytz = "^2022.1.1"
-python-keycloak = "^1.8.0"
+python-keycloak = "^1.9.1"


[tool.poetry.dev-dependencies]
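For reference, the bumped python-keycloak dependency is normally driven through its KeycloakOpenID client; a minimal sketch reusing the realm and credentials from the bin/get_token script above (whether the backend actually wires it up this way is an assumption):

# Minimal python-keycloak usage sketch; values borrowed from the get_token script, not from backend config.
from keycloak import KeycloakOpenID

keycloak_openid = KeycloakOpenID(
    server_url="http://localhost:7002/",
    realm_name="spiffworkflow",
    client_id="spiffworkflow-backend",
    client_secret_key="JXeQExm0JhQPLumgHtIIqf52bDalHz0q",
)
token = keycloak_openid.token("ciuser1", "ciuser1")  # password grant
userinfo = keycloak_openid.userinfo(token["access_token"])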
@@ -14,7 +14,6 @@ from flask_mail import Mail # type: ignore
import spiffworkflow_backend.load_database_models # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
-from spiffworkflow_backend.routes.api_blueprint import api_blueprint
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user_blueprint import user_blueprint

@@ -55,7 +54,6 @@ def create_app() -> flask.app.Flask:
migrate.init_app(app, db)

app.register_blueprint(user_blueprint)
-app.register_blueprint(api_blueprint)
app.register_blueprint(process_api_blueprint)
app.register_blueprint(api_error_blueprint)
app.register_blueprint(admin_blueprint, url_prefix="/admin")
@@ -165,7 +165,6 @@ class ProcessInstanceApi:
next_task: Task | None,
process_model_identifier: str,
process_group_identifier: str,
total_tasks: int,
completed_tasks: int,
updated_at_in_seconds: int,
is_review: bool,

@@ -178,7 +177,6 @@ class ProcessInstanceApi:
# self.navigation = navigation fixme: would be a hotness.
self.process_model_identifier = process_model_identifier
self.process_group_identifier = process_group_identifier
self.total_tasks = total_tasks
self.completed_tasks = completed_tasks
self.updated_at_in_seconds = updated_at_in_seconds
self.title = title

@@ -199,7 +197,6 @@ class ProcessInstanceApiSchema(Schema):
"navigation",
"process_model_identifier",
"process_group_identifier",
"total_tasks",
"completed_tasks",
"updated_at_in_seconds",
"is_review",

@@ -228,7 +225,6 @@ class ProcessInstanceApiSchema(Schema):
"navigation",
"process_model_identifier",
"process_group_identifier",
"total_tasks",
"completed_tasks",
"updated_at_in_seconds",
"is_review",

@@ -251,7 +247,6 @@ class ProcessInstanceMetadata:
spec_version: str | None = None
state: str | None = None
status: str | None = None
total_tasks: int | None = None
completed_tasks: int | None = None
is_review: bool | None = None
state_message: str | None = None

@@ -270,7 +265,6 @@ class ProcessInstanceMetadata:
process_group_id=process_model.process_group_id,
state_message=process_instance.state_message,
status=process_instance.status,
total_tasks=process_instance.total_tasks,
completed_tasks=process_instance.completed_tasks,
is_review=process_model.is_review,
process_model_identifier=process_instance.process_model_identifier,

@@ -292,7 +286,6 @@ class ProcessInstanceMetadataSchema(Schema):
"display_name",
"description",
"state",
"total_tasks",
"completed_tasks",
"process_group_id",
"is_review",
@@ -1,59 +0,0 @@
"""Api."""
import json
import os

from flask import Blueprint
from flask import current_app
from flask import request
from flask import Response
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter # type: ignore
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.spiff_workflow_connector import parse
from spiffworkflow_backend.spiff_workflow_connector import run


wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
    [UserTaskConverter, BusinessRuleTaskConverter]
)
serializer = BpmnWorkflowSerializer(wf_spec_converter)

api_blueprint = Blueprint("api", __name__)


@api_blueprint.route("/run_process", methods=["POST"])
def run_process() -> Response:
    """Run_process."""
    content = request.json
    if content is None:
        return Response(
            json.dumps({"error": "Could not find json request"}),
            status=400,
            mimetype="application/json",
        )

    bpmn_spec_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
    process = "order_product"
    dmn = [
        os.path.join(bpmn_spec_dir, "product_prices.dmn"),
        os.path.join(bpmn_spec_dir, "shipping_costs.dmn"),
    ]
    bpmn = [
        os.path.join(bpmn_spec_dir, "multiinstance.bpmn"),
        os.path.join(bpmn_spec_dir, "call_activity_multi.bpmn"),
    ]

    workflow = None
    process_instance = ProcessInstanceModel.query.filter().first()
    if process_instance is None:
        workflow = parse(process, bpmn, dmn)
    else:
        workflow = serializer.deserialize_json(process_instance.bpmn_json)

    response = run(workflow, content.get("task_identifier"), content.get("answer"))

    return Response(
        json.dumps({"response": response}), status=200, mimetype="application/json"
    )
@@ -14,6 +14,7 @@ from lxml import etree # type: ignore
from SpiffWorkflow import Task as SpiffTask # type: ignore
from SpiffWorkflow import TaskState
from SpiffWorkflow import WorkflowException
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException # type: ignore
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import Box # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

@@ -23,13 +24,12 @@ from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ig
from SpiffWorkflow.bpmn.specs.events import CancelEventDefinition # type: ignore
from SpiffWorkflow.bpmn.specs.events import EndEvent
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser # type: ignore
from SpiffWorkflow.camunda.serializer import UserTaskConverter # type: ignore
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter # type: ignore
from SpiffWorkflow.exceptions import WorkflowTaskExecException # type: ignore
from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore
from SpiffWorkflow.specs import WorkflowSpec # type: ignore
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore
from SpiffWorkflow.spiff.serializer import UserTaskConverter # type: ignore
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore

from spiffworkflow_backend.models.active_task import ActiveTaskModel
@@ -90,10 +90,10 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore


class MyCustomParser(BpmnDmnParser): # type: ignore
-    """A BPMN and DMN parser that can also parse Camunda forms."""
+    """A BPMN and DMN parser that can also parse spiffworkflow-specific extensions."""

    OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
-    OVERRIDE_PARSER_CLASSES.update(CamundaParser.OVERRIDE_PARSER_CLASSES)
+    OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES)


class ProcessInstanceProcessor:
@@ -355,8 +355,13 @@ class ProcessInstanceProcessor:
extensions = ready_or_waiting_task.task_spec.extensions

form_file_name = None
-if "formKey" in extensions:
-    form_file_name = extensions["formKey"]
+if "properties" in extensions:
+    properties = extensions["properties"]
+    if "formJsonSchemaFilename" in properties:
+        form_file_name = properties["formJsonSchemaFilename"]
+# FIXME:
+# if "formUiSchemaFilename" in properties:
+#     form_file_name = properties["formUiSchemaFilename"]

active_task = ActiveTaskModel(
    spiffworkflow_task_id=str(ready_or_waiting_task.id),
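The old and new lookups above imply the following extension shapes; the concrete file names here are invented for illustration, and only the JSON schema key is actually read (the UI schema key is still the FIXME above):

# Shapes implied by the lookups above; file names are hypothetical.
camunda_style_extensions = {"formKey": "my-form"}
spiff_style_extensions = {
    "properties": {
        "formJsonSchemaFilename": "my-form-schema.json",
        "formUiSchemaFilename": "my-form-uischema.json",  # not read yet; see FIXME above
    }
}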
@@ -67,7 +67,7 @@ class ProcessInstanceService:

If requested, and possible, next_task is set to the current_task.
"""
-navigation = processor.bpmn_process_instance.get_deep_nav_list()
+# navigation = processor.bpmn_process_instance.get_deep_nav_list()
# ProcessInstanceService.update_navigation(navigation, processor)
process_model_service = ProcessModelService()
process_model = process_model_service.get_process_model(

@@ -82,7 +82,7 @@ class ProcessInstanceService:
# navigation=navigation,
process_model_identifier=processor.process_model_identifier,
process_group_identifier=processor.process_group_identifier,
-total_tasks=len(navigation),
+# total_tasks=len(navigation),
completed_tasks=processor.process_instance_model.completed_tasks,
updated_at_in_seconds=processor.process_instance_model.updated_at_in_seconds,
is_review=is_review_value,
@@ -1,214 +0,0 @@
"""Spiff Workflow Connector."""
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Union

from flask_bpmn.models.db import db
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.bpmn.specs.events.event_types import CatchingEvent # type: ignore
from SpiffWorkflow.bpmn.specs.events.event_types import ThrowingEvent
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask # type: ignore
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser # type: ignore
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter # type: ignore
from SpiffWorkflow.camunda.specs.UserTask import EnumFormField # type: ignore
from SpiffWorkflow.camunda.specs.UserTask import UserTask
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore
from SpiffWorkflow.task import Task # type: ignore
from SpiffWorkflow.task import TaskState
from typing_extensions import TypedDict

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel


wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
    [UserTaskConverter, BusinessRuleTaskConverter]
)
serializer = BpmnWorkflowSerializer(wf_spec_converter)


class Parser(BpmnDmnParser): # type: ignore
    """Parser."""

    OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
    OVERRIDE_PARSER_CLASSES.update(CamundaParser.OVERRIDE_PARSER_CLASSES)


class ProcessStatus(TypedDict, total=False):
    """ProcessStatus."""

    last_task: str
    upcoming_tasks: List[str]
    next_activity: Dict[str, str]


def parse(process: str, bpmn_files: List[str], dmn_files: List[str]) -> BpmnWorkflow:
    """Parse."""
    parser = Parser()
    parser.add_bpmn_files(bpmn_files)
    if dmn_files:
        parser.add_dmn_files(dmn_files)
    return BpmnWorkflow(parser.get_spec(process))


def format_task(task: Task, include_state: bool = True) -> str:
    """Format_task."""
    if hasattr(task.task_spec, "lane") and task.task_spec.lane is not None:
        lane = f"[{task.task_spec.lane}]"
    else:
        lane = ""
    state = f"[{task.get_state_name()}]" if include_state else ""
    return f"{lane} {task.task_spec.description} ({task.task_spec.name}) {state}"


def process_field(
    field: Any, answer: Union[dict, None], required_user_input_fields: Dict[str, str]
) -> Union[str, int, None]:
    """Handles the complexities of figuring out what to do about each necessary user field."""
    response = None
    if isinstance(field, EnumFormField):
        option_map = {opt.name: opt.id for opt in field.options}
        options = "(" + ", ".join(option_map) + ")"
        if answer is None:
            required_user_input_fields[field.label] = options
        else:
            response = option_map[answer[field.label]]
    elif field.type == "string":
        if answer is None:
            required_user_input_fields[field.label] = "STRING"
        else:
            response = answer[field.label]
    else:
        if answer is None:
            required_user_input_fields[field.label] = "(1..)"
        else:
            if field.type == "long":
                response = int(answer[field.label])

    return response


def complete_user_task(
    task: Task, answer: Optional[Dict[str, str]] = None
) -> Dict[Any, Any]:
    """Complete_user_task."""
    if task.data is None:
        task.data = {}

    required_user_input_fields: Dict[str, str] = {}
    for field in task.task_spec.form.fields:
        response = process_field(field, answer, required_user_input_fields)
        if answer:
            task.update_data_var(field.id, response)
    return required_user_input_fields


def get_state(workflow: BpmnWorkflow) -> ProcessStatus:
    """Print_state."""
    task = workflow.last_task

    return_json: ProcessStatus = {"last_task": format_task(task), "upcoming_tasks": []}

    display_types = (UserTask, ManualTask, ScriptTask, ThrowingEvent, CatchingEvent)
    all_tasks = [
        task
        for task in workflow.get_tasks()
        if isinstance(task.task_spec, display_types)
    ]
    upcoming_tasks = [
        task for task in all_tasks if task.state in [TaskState.READY, TaskState.WAITING]
    ]

    for _idx, task in enumerate(upcoming_tasks):
        return_json["upcoming_tasks"].append(format_task(task))

    return return_json


def create_user() -> UserModel:
    """Create_user."""
    user = UserModel(username="user1")
    db.session.add(user)
    db.session.commit()

    return user


def create_process_instance() -> ProcessInstanceModel:
    """Create_process_instance."""
    user = UserModel.query.filter().first()
    if user is None:
        user = create_user()

    process_instance = ProcessInstanceModel(
        process_model_identifier="process_model1", process_initiator_id=user.id
    )
    db.session.add(process_instance)
    db.session.commit()

    return process_instance


def run(
    workflow: BpmnWorkflow,
    task_identifier: Optional[str] = None,
    answer: Optional[Dict[str, str]] = None,
) -> Union[ProcessStatus, Dict[str, str]]:
    """Run."""
    workflow.do_engine_steps()
    tasks_status = ProcessStatus()

    if workflow.is_completed():
        return tasks_status

    ready_tasks = workflow.get_ready_user_tasks()
    options = {}
    formatted_options = {}

    for idx, task in enumerate(ready_tasks):
        option = format_task(task, False)
        options[str(idx + 1)] = task
        formatted_options[str(idx + 1)] = option

    if task_identifier is None:
        return formatted_options

    next_task = options[task_identifier]
    if isinstance(next_task.task_spec, UserTask):
        if answer is None:
            return complete_user_task(next_task)
        else:
            complete_user_task(next_task, answer)
            next_task.complete()
    elif isinstance(next_task.task_spec, ManualTask):
        next_task.complete()
    else:
        next_task.complete()

    workflow.refresh_waiting_tasks()
    workflow.do_engine_steps()
    tasks_status = get_state(workflow)

    ready_tasks = workflow.get_ready_user_tasks()
    formatted_options = {}
    for idx, task in enumerate(ready_tasks):
        option = format_task(task, False)
        formatted_options[str(idx + 1)] = option

    state = serializer.serialize_json(workflow)
    process_instance = ProcessInstanceModel.query.filter().first()

    if process_instance is None:
        process_instance = create_process_instance()
    process_instance.bpmn_json = state
    db.session.add(process_instance)
    db.session.commit()

    tasks_status["next_activity"] = formatted_options

    return tasks_status