Merge branch 'main' into feature/process-nav-improvements

This commit is contained in:
Elizabeth Esswein 2022-12-30 11:41:08 -05:00
commit 292fd0a1f2
19 changed files with 2289 additions and 2085 deletions

View File

@ -7,4 +7,5 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
docker compose logs "$@"
# "docker compose logs" is only getting the db logs so specify them both
docker compose logs db spiffworkflow-backend

View File

@ -2989,7 +2989,18 @@ psycopg2 = [
{file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
]
pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [

View File

@ -23,7 +23,6 @@ from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_b
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
openid_blueprint,
)
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.authorization_service import AuthorizationService
@ -104,7 +103,6 @@ def create_app() -> flask.app.Flask:
migrate.init_app(app, db)
app.register_blueprint(user_blueprint)
app.register_blueprint(process_api_blueprint)
app.register_blueprint(api_error_blueprint)
app.register_blueprint(admin_blueprint, url_prefix="/admin")
app.register_blueprint(openid_blueprint, url_prefix="/openid")

View File

@ -8,10 +8,6 @@ servers:
- url: http://localhost:5000/v1.0
# this is handled in flask now
security: []
# - jwt: ["secret"]
# - oAuth2AuthCode:
# - read_email
# - uid
paths:
/login:
@ -22,7 +18,6 @@ paths:
schema:
type: string
get:
security: []
summary: redirect to open id authentication server
operationId: spiffworkflow_backend.routes.user.login
tags:
@ -48,7 +43,6 @@ paths:
schema:
type: string
get:
security: []
operationId: spiffworkflow_backend.routes.user.login_return
tags:
- Authentication
@ -68,7 +62,6 @@ paths:
schema:
type: string
get:
security: []
operationId: spiffworkflow_backend.routes.user.logout
summary: Logout authenticated user
tags:
@ -78,7 +71,6 @@ paths:
description: Logout Authenticated User
/logout_return:
get:
security: []
operationId: spiffworkflow_backend.routes.user.logout_return
summary: Logout authenticated user
tags:
@ -89,7 +81,6 @@ paths:
/login_api:
get:
security: []
operationId: spiffworkflow_backend.routes.user.login_api
summary: Authenticate user for API access
tags:
@ -115,7 +106,6 @@ paths:
schema:
type: string
get:
security: []
operationId: spiffworkflow_backend.routes.user.login_api_return
tags:
- Authentication
@ -125,8 +115,7 @@ paths:
/status:
get:
security: []
operationId: spiffworkflow_backend.routes.process_api_blueprint.status
operationId: spiffworkflow_backend.routes.health_controller.status
summary: Returns 200 if the server is responding
tags:
- Liveness
@ -160,7 +149,7 @@ paths:
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_list
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_list
summary: Returns a list of process groups
tags:
- Process Groups
@ -174,7 +163,7 @@ paths:
items:
$ref: "#/components/schemas/ProcessModelCategory"
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_create
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_create
summary: Add process group
tags:
- Process Groups
@ -201,7 +190,7 @@ paths:
type: string
# process_group_show
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_show
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_show
summary: Returns a single process group
tags:
- Process Groups
@ -213,7 +202,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_delete
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_delete
summary: Deletes a single process group
tags:
- Process Groups
@ -221,7 +210,7 @@ paths:
"200":
description: The process group was deleted.
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_update
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_update
summary: Updates a single process group
tags:
- Process Groups
@ -253,7 +242,7 @@ paths:
schema:
type: string
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_move
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_move
summary: returns the new group
tags:
- Process Groups
@ -298,7 +287,7 @@ paths:
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_list
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_list
summary: Return a list of process models for a given process group
tags:
- Process Models
@ -321,7 +310,7 @@ paths:
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_create
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_create
summary: Creates a new process model with the given parameters.
tags:
- Process Models
@ -347,7 +336,7 @@ paths:
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_create
summary: Add a new workflow spec file
tags:
- Process Model Files
@ -377,7 +366,7 @@ paths:
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_show
summary: Returns a single process model
tags:
- Process Models
@ -389,7 +378,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_update
summary: Modifies an existing process model with the given parameters.
tags:
- Process Models
@ -406,7 +395,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_delete
summary: Removes an existing process model
tags:
- Process Models
@ -433,7 +422,7 @@ paths:
schema:
type: string
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_move
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_move
summary: returns the new model
tags:
- Process Models
@ -460,7 +449,7 @@ paths:
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_publish
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_publish
summary: Merge changes from this model to another branch.
tags:
- Process Models
@ -608,7 +597,7 @@ paths:
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list_for_me
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list_for_me
summary: Returns a list of process instances that are associated with me.
tags:
- Process Instances
@ -721,7 +710,7 @@ paths:
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list
summary: Returns a list of process instances.
tags:
- Process Instances
@ -744,7 +733,7 @@ paths:
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_create
operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_create
summary: Create script unit test based on given criteria
tags:
- Script Unit Test
@ -765,7 +754,7 @@ paths:
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_run
operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_run
summary: Run a given script unit test.
tags:
- Script Unit Test
@ -786,7 +775,7 @@ paths:
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_create
summary: Creates a process instance from a process model and returns the instance
tags:
- Process Instances
@ -833,7 +822,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data_for_me
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data_for_me
summary: returns the list of all user tasks associated with the process instance, without the task data
responses:
"200":
@ -880,7 +869,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data
summary: returns the list of all user tasks associated with the process instance, without the task data
responses:
"200":
@ -915,7 +904,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show_for_me
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show_for_me
summary: Show information about a process instance that is associated with me
responses:
"200":
@ -948,7 +937,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show
summary: Show information about a process instance
responses:
"200":
@ -958,7 +947,7 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_delete
summary: Deletes a single process instance
tags:
- Process Instances
@ -985,7 +974,7 @@ paths:
schema:
type: boolean
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_run
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_run
summary: Run a process instance
tags:
- Process Instances
@ -1006,7 +995,7 @@ paths:
schema:
type: integer
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_terminate
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_terminate
summary: Terminate a process instance
tags:
- Process Instances
@ -1027,7 +1016,7 @@ paths:
schema:
type: integer
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_suspend
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_suspend
summary: Suspend a process instance
tags:
- Process Instances
@ -1048,7 +1037,7 @@ paths:
schema:
type: integer
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_resume
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_resume
summary: Resume a process instance
tags:
- Process Instances
@ -1081,7 +1070,7 @@ paths:
schema:
type: integer
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_reset
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset
summary: Reset a process instance to an earlier step
tags:
- Process Instances
@ -1108,7 +1097,7 @@ paths:
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_list
summary: Returns all process instance reports for process model
tags:
- Process Instances
@ -1122,7 +1111,7 @@ paths:
items:
$ref: "#/components/schemas/Workflow"
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_create
summary: Creates a new process instance report
tags:
- Process Instances
@ -1136,7 +1125,7 @@ paths:
/process-instances/reports/columns:
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_column_list
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list
summary: Returns all available columns for a process instance report.
tags:
- Process Instances
@ -1171,7 +1160,7 @@ paths:
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_show
summary: Returns a report of process instances for a given process model
tags:
- Process Instances
@ -1185,7 +1174,7 @@ paths:
items:
$ref: "#/components/schemas/Workflow"
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_update
summary: Updates a process instance report
tags:
- Process Instances
@ -1197,7 +1186,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_delete
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_delete
summary: Delete a process instance report
tags:
- Process Instances
@ -1224,7 +1213,7 @@ paths:
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_show
summary: Returns metadata about the file
tags:
- Process Model Files
@ -1236,7 +1225,7 @@ paths:
schema:
$ref: "#/components/schemas/File"
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_update
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_update
summary: save the contents to the given file
tags:
- Process Model Files
@ -1259,7 +1248,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_delete
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_delete
summary: Removes an existing process model file
tags:
- Process Model Files
@ -1288,8 +1277,7 @@ paths:
get:
tags:
- Tasks
# security: []
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_my_tasks
operationId: spiffworkflow_backend.routes.tasks_controller.task_list_my_tasks
summary: returns the list of ready or waiting tasks for a user
responses:
"200":
@ -1318,7 +1306,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_open_processes
operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_open_processes
summary: returns the list of tasks for given user's open process instances
responses:
"200":
@ -1347,7 +1335,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_me
operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_me
summary: returns the list of tasks for the given user
responses:
"200":
@ -1382,7 +1370,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_groups
operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_groups
summary: returns the list of tasks for groups the given user belongs to
responses:
"200":
@ -1439,7 +1427,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_with_task_data
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_with_task_data
summary: returns the list of all user tasks associated with the process instance, with the task data
responses:
"200":
@ -1581,7 +1569,7 @@ paths:
get:
tags:
- Service Tasks
operationId: spiffworkflow_backend.routes.process_api_blueprint.service_task_list
operationId: spiffworkflow_backend.routes.service_tasks_controller.service_task_list
summary: Gets all available service task connectors
responses:
"200":
@ -1595,7 +1583,7 @@ paths:
get:
tags:
- Authentications
operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_list
operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_list
summary: Gets all available authentications from connector proxy
responses:
"200":
@ -1632,11 +1620,9 @@ paths:
schema:
type: string
get:
# disable security so we can get the token from query params instead
security: []
tags:
- Authentications
operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_callback
operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_callback
summary: Callback to backend
responses:
"200":
@ -1669,7 +1655,7 @@ paths:
get:
tags:
- Tasks
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_show
operationId: spiffworkflow_backend.routes.tasks_controller.task_show
summary: Gets one task that a user wants to complete
responses:
"200":
@ -1681,7 +1667,7 @@ paths:
put:
tags:
- Tasks
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_submit
operationId: spiffworkflow_backend.routes.tasks_controller.task_submit
summary: Update the form data for a task
requestBody:
content:
@ -1725,7 +1711,7 @@ paths:
get:
tags:
- Messages
operationId: spiffworkflow_backend.routes.process_api_blueprint.message_instance_list
operationId: spiffworkflow_backend.routes.messages_controller.message_instance_list
summary: Get a list of message instances
responses:
"200":
@ -1746,7 +1732,7 @@ paths:
post:
tags:
- Messages
operationId: spiffworkflow_backend.routes.process_api_blueprint.message_start
operationId: spiffworkflow_backend.routes.messages_controller.message_start
summary: Instantiate and run a given process model with a message start event matching the given identifier
requestBody:
content:
@ -1790,7 +1776,7 @@ paths:
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_log_list
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_log_list
summary: returns a list of logs associated with the process instance
responses:
"200":
@ -1815,7 +1801,7 @@ paths:
schema:
type: integer
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_create
operationId: spiffworkflow_backend.routes.secrets_controller.secret_create
summary: Create a secret for a key and value
tags:
- Secrets
@ -1832,7 +1818,7 @@ paths:
schema:
type: number
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_list
operationId: spiffworkflow_backend.routes.secrets_controller.secret_list
summary: Return list of all secrets
tags:
- Secrets
@ -1853,7 +1839,7 @@ paths:
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.get_secret
operationId: spiffworkflow_backend.routes.secrets_controller.secret_show
summary: Return a secret value for a key
tags:
- Secrets
@ -1865,7 +1851,7 @@ paths:
schema:
$ref: "#/components/schemas/Secret"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_delete
operationId: spiffworkflow_backend.routes.secrets_controller.secret_delete
summary: Delete an existing secret
tags:
- Secrets
@ -1877,7 +1863,7 @@ paths:
"404":
description: Secret does not exist
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_update
operationId: spiffworkflow_backend.routes.secrets_controller.secret_update
summary: Modify an existing secret
tags:
- Secrets
@ -1936,16 +1922,6 @@ components:
scopes:
read_email: read email
x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope
# oAuth2AuthCode:
# type: oauth2
# description: authenticate with openid server
# flows:
# implicit:
# authorizationUrl: /v1.0/login_api
# scopes:
# uid: uid
# x-tokenInfoUrl: localhost:7000/v1.0/login_api_return
# x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope
schemas:
OkTrue:
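The operationId rewrites above are the heart of this change: each dotted path now points at a function in a focused controller module (health_controller, process_groups_controller, process_models_controller, process_instances_controller, tasks_controller, messages_controller, service_tasks_controller, script_unit_tests_controller, secrets_controller) instead of the monolithic process_api_blueprint. Connexion resolves those strings by importing the module and looking up the callable; the sketch below illustrates that resolution rule and is not connexion's actual implementation.

# Sketch: a dotted operationId such as
# "spiffworkflow_backend.routes.health_controller.status" names a module path
# (everything before the last dot) and a function (the last segment).
import importlib
from typing import Callable

def resolve_operation(operation_id: str) -> Callable:
    """Import the module named by the operationId and return the handler function."""
    module_path, function_name = operation_id.rsplit(".", 1)
    module = importlib.import_module(module_path)
    return getattr(module, function_name)

# handler = resolve_operation("spiffworkflow_backend.routes.health_controller.status")
# handler()  # would execute the status() function defined in the new controller below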

View File

@ -0,0 +1,13 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import flask.wrappers
from flask.wrappers import Response
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
def status() -> flask.wrappers.Response:
"""Status."""
ProcessInstanceModel.query.filter().first()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
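For completeness, a hypothetical smoke test of the new health endpoint; it assumes the backend is running locally on port 5000 (the server URL listed in api.yml above) and relies on /status being unauthenticated (security: []).

# Hypothetical check of GET /v1.0/status; requires the `requests` package.
import requests

response = requests.get("http://localhost:5000/v1.0/status", timeout=5)
assert response.status_code == 200          # the spec promises 200 when responding
assert response.json() == {"ok": True}      # matches the JSON built by status() above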

View File

@ -0,0 +1,170 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Dict
from typing import Optional
import flask.wrappers
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.message_triggerable_process_model import (
MessageTriggerableProcessModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.routes.process_api_blueprint import (
_find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.services.message_service import MessageService
def message_instance_list(
process_instance_id: Optional[int] = None,
page: int = 1,
per_page: int = 100,
) -> flask.wrappers.Response:
"""Message_instance_list."""
# to make sure the process instance exists
message_instances_query = MessageInstanceModel.query
if process_instance_id:
message_instances_query = message_instances_query.filter_by(
process_instance_id=process_instance_id
)
message_instances = (
message_instances_query.order_by(
MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore
MessageInstanceModel.id.desc(), # type: ignore
)
.join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id)
.join(ProcessInstanceModel)
.add_columns(
MessageModel.identifier.label("message_identifier"),
ProcessInstanceModel.process_model_identifier,
ProcessInstanceModel.process_model_display_name,
)
.paginate(page=page, per_page=per_page, error_out=False)
)
for message_instance in message_instances:
message_correlations: dict = {}
for (
mcmi
) in (
message_instance.MessageInstanceModel.message_correlations_message_instances
):
mc = MessageCorrelationModel.query.filter_by(
id=mcmi.message_correlation_id
).all()
for m in mc:
if m.name not in message_correlations:
message_correlations[m.name] = {}
message_correlations[m.name][
m.message_correlation_property.identifier
] = m.value
message_instance.MessageInstanceModel.message_correlations = (
message_correlations
)
response_json = {
"results": message_instances.items,
"pagination": {
"count": len(message_instances.items),
"total": message_instances.total,
"pages": message_instances.pages,
},
}
return make_response(jsonify(response_json), 200)
# body: {
# payload: dict,
# process_instance_id: Optional[int],
# }
def message_start(
message_identifier: str,
body: Dict[str, Any],
) -> flask.wrappers.Response:
"""Message_start."""
message_model = MessageModel.query.filter_by(identifier=message_identifier).first()
if message_model is None:
raise (
ApiError(
error_code="unknown_message",
message=f"Could not find message with identifier: {message_identifier}",
status_code=404,
)
)
if "payload" not in body:
raise (
ApiError(
error_code="missing_payload",
message="Body is missing payload.",
status_code=400,
)
)
process_instance = None
if "process_instance_id" in body:
# to make sure we have a valid process_instance_id
process_instance = _find_process_instance_by_id_or_raise(
body["process_instance_id"]
)
message_instance = MessageInstanceModel.query.filter_by(
process_instance_id=process_instance.id,
message_model_id=message_model.id,
message_type="receive",
status="ready",
).first()
if message_instance is None:
raise (
ApiError(
error_code="cannot_find_waiting_message",
message=f"Could not find waiting message for identifier {message_identifier} "
f"and process instance {process_instance.id}",
status_code=400,
)
)
MessageService.process_message_receive(
message_instance, message_model.name, body["payload"]
)
else:
message_triggerable_process_model = (
MessageTriggerableProcessModel.query.filter_by(
message_model_id=message_model.id
).first()
)
if message_triggerable_process_model is None:
raise (
ApiError(
error_code="cannot_start_message",
message=f"Message with identifier cannot be start with message: {message_identifier}",
status_code=400,
)
)
process_instance = MessageService.process_message_triggerable_process_model(
message_triggerable_process_model,
message_model.name,
body["payload"],
g.user,
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)
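As a usage illustration of message_start, a hedged client example follows; the URL path, message identifier, and payload keys are assumptions for illustration (only the body shape, a required "payload" and an optional "process_instance_id", comes from the code above), and any required auth headers are omitted.

# Hypothetical client call to the message start endpoint handled by message_start().
import requests

body = {
    "payload": {"invoice_id": 42},   # required: data used for message correlation
    # "process_instance_id": 123,   # optional: deliver to a waiting "receive" message
}
response = requests.post(
    "http://localhost:5000/v1.0/messages/invoice_received",  # path and identifier assumed
    json=body,
    timeout=5,
)
print(response.status_code, response.json())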

View File

@ -0,0 +1,129 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Optional
import flask.wrappers
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
from spiffworkflow_backend.routes.process_api_blueprint import (
_un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
def process_group_create(body: dict) -> flask.wrappers.Response:
"""Add_process_group."""
process_group = ProcessGroup(**body)
ProcessModelService.add_process_group(process_group)
_commit_and_push_to_git(
f"User: {g.user.username} added process group {process_group.id}"
)
return make_response(jsonify(process_group), 201)
def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
"""Process_group_delete."""
process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
ProcessModelService().process_group_delete(process_group_id)
_commit_and_push_to_git(
f"User: {g.user.username} deleted process group {process_group_id}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_group_update(
modified_process_group_id: str, body: dict
) -> flask.wrappers.Response:
"""Process Group Update."""
body_include_list = ["display_name", "description"]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
process_group = ProcessGroup(id=process_group_id, **body_filtered)
ProcessModelService.update_process_group(process_group)
_commit_and_push_to_git(
f"User: {g.user.username} updated process group {process_group_id}"
)
return make_response(jsonify(process_group), 200)
def process_group_list(
process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Process_group_list."""
if process_group_identifier is not None:
process_groups = ProcessModelService.get_process_groups(
process_group_identifier
)
else:
process_groups = ProcessModelService.get_process_groups()
batch = ProcessModelService().get_batch(
items=process_groups, page=page, per_page=per_page
)
pages = len(process_groups) // per_page
remainder = len(process_groups) % per_page
if remainder > 0:
pages += 1
response_json = {
"results": ProcessGroupSchema(many=True).dump(batch),
"pagination": {
"count": len(batch),
"total": len(process_groups),
"pages": pages,
},
}
return Response(json.dumps(response_json), status=200, mimetype="application/json")
def process_group_show(
modified_process_group_id: str,
) -> Any:
"""Process_group_show."""
process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
try:
process_group = ProcessModelService.get_process_group(process_group_id)
except ProcessEntityNotFoundError as exception:
raise (
ApiError(
error_code="process_group_cannot_be_found",
message=f"Process group cannot be found: {process_group_id}",
status_code=400,
)
) from exception
process_group.parent_groups = ProcessModelService.get_parent_group_array(
process_group.id
)
return make_response(jsonify(process_group), 200)
def process_group_move(
modified_process_group_identifier: str, new_location: str
) -> flask.wrappers.Response:
"""Process_group_move."""
original_process_group_id = _un_modify_modified_process_model_id(
modified_process_group_identifier
)
new_process_group = ProcessModelService().process_group_move(
original_process_group_id, new_location
)
_commit_and_push_to_git(
f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
)
return make_response(jsonify(new_process_group), 200)
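The page count computed in process_group_list above (integer division plus one when there is a remainder) is just ceiling division; a small equivalent sketch:

# Equivalent of the pages/remainder arithmetic in process_group_list above.
import math

def page_count(total_items: int, per_page: int) -> int:
    """Ceiling division: 250 items at 100 per page -> 3 pages."""
    return math.ceil(total_items / per_page)

assert page_count(250, 100) == 3
assert page_count(200, 100) == 2
assert page_count(0, 100) == 0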

View File

@ -0,0 +1,686 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Dict
from typing import Optional
import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask import request
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import TaskState # type: ignore
from sqlalchemy import and_
from sqlalchemy import or_
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceCannotBeDeletedError,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
_find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
_un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportFilter,
)
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportService,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
def process_instance_create(
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Create_process_instance."""
process_model_identifier = _un_modify_modified_process_model_id(
modified_process_model_identifier
)
process_instance = (
ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model_identifier, g.user
)
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=201,
mimetype="application/json",
)
def process_instance_run(
modified_process_model_identifier: str,
process_instance_id: int,
do_engine_steps: bool = True,
) -> flask.wrappers.Response:
"""Process_instance_run."""
process_instance = ProcessInstanceService().get_process_instance(
process_instance_id
)
if process_instance.status != "not_started":
raise ApiError(
error_code="process_instance_not_runnable",
message=f"Process Instance ({process_instance.id}) is currently running or has already run.",
status_code=400,
)
processor = ProcessInstanceProcessor(process_instance)
if do_engine_steps:
try:
processor.do_engine_steps(save=True)
except ApiError as e:
ErrorHandlingService().handle_error(processor, e)
raise e
except Exception as e:
ErrorHandlingService().handle_error(processor, e)
task = processor.bpmn_process_instance.last_task
raise ApiError.from_task(
error_code="unknown_exception",
message=f"An unknown error occurred. Original error: {e}",
status_code=400,
task=task,
) from e
if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
MessageService.process_message_instances()
process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
processor
)
process_instance_data = processor.get_data()
process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
process_instance_metadata["data"] = process_instance_data
return Response(
json.dumps(process_instance_metadata), status=200, mimetype="application/json"
)
def process_instance_terminate(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_instance_run."""
process_instance = ProcessInstanceService().get_process_instance(
process_instance_id
)
processor = ProcessInstanceProcessor(process_instance)
processor.terminate()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_suspend(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_instance_suspend."""
process_instance = ProcessInstanceService().get_process_instance(
process_instance_id
)
processor = ProcessInstanceProcessor(process_instance)
processor.suspend()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_resume(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_instance_resume."""
process_instance = ProcessInstanceService().get_process_instance(
process_instance_id
)
processor = ProcessInstanceProcessor(process_instance)
processor.resume()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_log_list(
modified_process_model_identifier: str,
process_instance_id: int,
page: int = 1,
per_page: int = 100,
detailed: bool = False,
) -> flask.wrappers.Response:
"""Process_instance_log_list."""
# to make sure the process instance exists
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
log_query = SpiffLoggingModel.query.filter(
SpiffLoggingModel.process_instance_id == process_instance.id
)
if not detailed:
log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore
logs = (
log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
.join(
UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
) # isouter since if we don't have a user, we still want the log
.add_columns(
UserModel.username,
)
.paginate(page=page, per_page=per_page, error_out=False)
)
response_json = {
"results": logs.items,
"pagination": {
"count": len(logs.items),
"total": logs.total,
"pages": logs.pages,
},
}
return make_response(jsonify(response_json), 200)
def process_instance_list_for_me(
process_model_identifier: Optional[str] = None,
page: int = 1,
per_page: int = 100,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
user_filter: Optional[bool] = False,
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list_for_me."""
return process_instance_list(
process_model_identifier=process_model_identifier,
page=page,
per_page=per_page,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
user_filter=user_filter,
report_identifier=report_identifier,
report_id=report_id,
user_group_identifier=user_group_identifier,
with_relation_to_me=True,
)
def process_instance_list(
process_model_identifier: Optional[str] = None,
page: int = 1,
per_page: int = 100,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
with_relation_to_me: Optional[bool] = None,
user_filter: Optional[bool] = False,
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list."""
process_instance_report = ProcessInstanceReportService.report_with_identifier(
g.user, report_id, report_identifier
)
if user_filter:
report_filter = ProcessInstanceReportFilter(
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
with_relation_to_me=with_relation_to_me,
process_status=process_status.split(",") if process_status else None,
)
else:
report_filter = (
ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
with_relation_to_me=with_relation_to_me,
)
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
page=page,
per_page=per_page,
user=g.user,
)
return make_response(jsonify(response_json), 200)
def process_instance_report_column_list() -> flask.wrappers.Response:
"""Process_instance_report_column_list."""
table_columns = ProcessInstanceReportService.builtin_column_options()
columns_for_metadata = (
db.session.query(ProcessInstanceMetadataModel.key)
.order_by(ProcessInstanceMetadataModel.key)
.distinct() # type: ignore
.all()
)
columns_for_metadata_strings = [
{"Header": i[0], "accessor": i[0], "filterable": True}
for i in columns_for_metadata
]
return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)
def process_instance_show_for_me(
modified_process_model_identifier: str,
process_instance_id: int,
process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_show_for_me."""
process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
return _get_process_instance(
process_instance=process_instance,
modified_process_model_identifier=modified_process_model_identifier,
process_identifier=process_identifier,
)
def process_instance_show(
modified_process_model_identifier: str,
process_instance_id: int,
process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Create_process_instance."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
return _get_process_instance(
process_instance=process_instance,
modified_process_model_identifier=modified_process_model_identifier,
process_identifier=process_identifier,
)
def process_instance_delete(
process_instance_id: int, modified_process_model_identifier: str
) -> flask.wrappers.Response:
"""Create_process_instance."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
if not process_instance.has_terminal_status():
raise ProcessInstanceCannotBeDeletedError(
f"Process instance ({process_instance.id}) cannot be deleted since it does not have a terminal status. "
f"Current status is {process_instance.status}."
)
# (Pdb) db.session.delete
# <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
db.session.query(SpiffLoggingModel).filter_by(
process_instance_id=process_instance.id
).delete()
db.session.query(SpiffStepDetailsModel).filter_by(
process_instance_id=process_instance.id
).delete()
db.session.delete(process_instance)
db.session.commit()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_report_list(
page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Process_instance_report_list."""
process_instance_reports = ProcessInstanceReportModel.query.filter_by(
created_by_id=g.user.id,
).all()
return make_response(jsonify(process_instance_reports), 200)
def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.create_report(
identifier=body["identifier"],
user=g.user,
report_metadata=body["report_metadata"],
)
return make_response(jsonify(process_instance_report), 201)
def process_instance_report_update(
report_id: int,
body: Dict[str, Any],
) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
id=report_id,
created_by_id=g.user.id,
).first()
if process_instance_report is None:
raise ApiError(
error_code="unknown_process_instance_report",
message="Unknown process instance report",
status_code=404,
)
process_instance_report.report_metadata = body["report_metadata"]
db.session.commit()
return make_response(jsonify(process_instance_report), 201)
def process_instance_report_delete(
report_id: int,
) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
id=report_id,
created_by_id=g.user.id,
).first()
if process_instance_report is None:
raise ApiError(
error_code="unknown_process_instance_report",
message="Unknown process instance report",
status_code=404,
)
db.session.delete(process_instance_report)
db.session.commit()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_report_show(
report_id: int,
page: int = 1,
per_page: int = 100,
) -> flask.wrappers.Response:
"""Process_instance_report_show."""
process_instances = ProcessInstanceModel.query.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
).paginate(page=page, per_page=per_page, error_out=False)
process_instance_report = ProcessInstanceReportModel.query.filter_by(
id=report_id,
created_by_id=g.user.id,
).first()
if process_instance_report is None:
raise ApiError(
error_code="unknown_process_instance_report",
message="Unknown process instance report",
status_code=404,
)
substitution_variables = request.args.to_dict()
result_dict = process_instance_report.generate_report(
process_instances.items, substitution_variables
)
# update this if we go back to a database query instead of filtering in memory
result_dict["pagination"] = {
"count": len(result_dict["results"]),
"total": len(result_dict["results"]),
"pages": 1,
}
return Response(json.dumps(result_dict), status=200, mimetype="application/json")
def process_instance_task_list_without_task_data_for_me(
modified_process_model_identifier: str,
process_instance_id: int,
all_tasks: bool = False,
spiff_step: int = 0,
) -> flask.wrappers.Response:
"""Process_instance_task_list_without_task_data_for_me."""
process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
return process_instance_task_list(
modified_process_model_identifier,
process_instance,
all_tasks,
spiff_step,
get_task_data=False,
)
def process_instance_task_list_without_task_data(
modified_process_model_identifier: str,
process_instance_id: int,
all_tasks: bool = False,
spiff_step: int = 0,
) -> flask.wrappers.Response:
"""Process_instance_task_list_without_task_data."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
return process_instance_task_list(
modified_process_model_identifier,
process_instance,
all_tasks,
spiff_step,
get_task_data=False,
)
def process_instance_task_list_with_task_data(
modified_process_model_identifier: str,
process_instance_id: int,
all_tasks: bool = False,
spiff_step: int = 0,
) -> flask.wrappers.Response:
"""Process_instance_task_list_with_task_data."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
return process_instance_task_list(
modified_process_model_identifier,
process_instance,
all_tasks,
spiff_step,
get_task_data=True,
)
def process_instance_task_list(
_modified_process_model_identifier: str,
process_instance: ProcessInstanceModel,
all_tasks: bool = False,
spiff_step: int = 0,
get_task_data: bool = False,
) -> flask.wrappers.Response:
"""Process_instance_task_list."""
if spiff_step > 0:
step_detail = (
db.session.query(SpiffStepDetailsModel)
.filter(
SpiffStepDetailsModel.process_instance_id == process_instance.id,
SpiffStepDetailsModel.spiff_step == spiff_step,
)
.first()
)
if step_detail is not None and process_instance.bpmn_json is not None:
bpmn_json = json.loads(process_instance.bpmn_json)
bpmn_json["tasks"] = step_detail.task_json["tasks"]
bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
process_instance.bpmn_json = json.dumps(bpmn_json)
processor = ProcessInstanceProcessor(process_instance)
spiff_tasks = None
if all_tasks:
spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
else:
spiff_tasks = processor.get_all_user_tasks()
tasks = []
for spiff_task in spiff_tasks:
task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
if get_task_data:
task.data = spiff_task.data
tasks.append(task)
return make_response(jsonify(tasks), 200)
def process_instance_reset(
process_instance_id: int,
modified_process_model_identifier: str,
spiff_step: int = 0,
) -> flask.wrappers.Response:
"""Process_instance_reset."""
process_instance = ProcessInstanceService().get_process_instance(
process_instance_id
)
step_detail = (
db.session.query(SpiffStepDetailsModel)
.filter(
SpiffStepDetailsModel.process_instance_id == process_instance.id,
SpiffStepDetailsModel.spiff_step == spiff_step,
)
.first()
)
if step_detail is not None and process_instance.bpmn_json is not None:
bpmn_json = json.loads(process_instance.bpmn_json)
bpmn_json["tasks"] = step_detail.task_json["tasks"]
bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
process_instance.bpmn_json = json.dumps(bpmn_json)
db.session.add(process_instance)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
raise ApiError(
error_code="reset_process_instance_error",
message=f"Could not update the Instance. Original error is {e}",
) from e
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)
def _get_process_instance(
modified_process_model_identifier: str,
process_instance: ProcessInstanceModel,
process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""_get_process_instance."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
try:
current_version_control_revision = GitService.get_current_revision()
except GitCommandError:
current_version_control_revision = ""
process_model_with_diagram = None
name_of_file_with_diagram = None
if process_identifier:
spec_reference = SpecReferenceCache.query.filter_by(
identifier=process_identifier, type="process"
).first()
if spec_reference is None:
raise SpecReferenceNotFoundError(
f"Could not find given process identifier in the cache: {process_identifier}"
)
process_model_with_diagram = ProcessModelService.get_process_model(
spec_reference.process_model_id
)
name_of_file_with_diagram = spec_reference.file_name
else:
process_model_with_diagram = _get_process_model(process_model_identifier)
if process_model_with_diagram.primary_file_name:
name_of_file_with_diagram = process_model_with_diagram.primary_file_name
if process_model_with_diagram and name_of_file_with_diagram:
if (
process_instance.bpmn_version_control_identifier
== current_version_control_revision
):
bpmn_xml_file_contents = SpecFileService.get_data(
process_model_with_diagram, name_of_file_with_diagram
).decode("utf-8")
else:
bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision(
process_model_with_diagram,
process_instance.bpmn_version_control_identifier,
file_name=name_of_file_with_diagram,
)
process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents
return make_response(jsonify(process_instance), 200)
def _find_process_instance_for_me_or_raise(
process_instance_id: int,
) -> ProcessInstanceModel:
"""_find_process_instance_for_me_or_raise."""
process_instance: ProcessInstanceModel = (
ProcessInstanceModel.query.filter_by(id=process_instance_id)
.outerjoin(HumanTaskModel)
.outerjoin(
HumanTaskUserModel,
and_(
HumanTaskModel.id == HumanTaskUserModel.human_task_id,
HumanTaskUserModel.user_id == g.user.id,
),
)
.filter(
or_(
HumanTaskUserModel.id.is_not(None),
ProcessInstanceModel.process_initiator_id == g.user.id,
)
)
.first()
)
if process_instance is None:
raise (
ApiError(
error_code="process_instance_cannot_be_found",
message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.",
status_code=400,
)
)
return process_instance
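The outer joins and or_ filter in _find_process_instance_for_me_or_raise encode the "for me" visibility rule: an instance is returned only if the current user initiated it or is assigned one of its human tasks. A pure-Python restatement of that rule follows; the names are illustrative only.

# Illustrative restatement of the visibility check performed by the SQL query above.
from typing import Set

def instance_visible_to_me(initiator_id: int, assigned_user_ids: Set[int], user_id: int) -> bool:
    """True when the user started the instance or holds one of its human tasks."""
    return user_id == initiator_id or user_id in assigned_user_ids

assert instance_visible_to_me(initiator_id=1, assigned_user_ids={5, 9}, user_id=1)
assert instance_visible_to_me(initiator_id=1, assigned_user_ids={5, 9}, user_id=9)
assert not instance_visible_to_me(initiator_id=1, assigned_user_ids={5, 9}, user_id=7)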

View File

@ -0,0 +1,315 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Dict
from typing import Optional
from typing import Union
import connexion # type: ignore
import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from spiffworkflow_backend.models.file import FileSchema
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
_un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.git_service import MissingGitConfigsError
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
def process_model_create(
modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
"""Process_model_create."""
body_include_list = [
"id",
"display_name",
"primary_file_name",
"primary_process_id",
"description",
"metadata_extraction_paths",
]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
if modified_process_group_id is None:
raise ApiError(
error_code="process_group_id_not_specified",
message="Process Model could not be created when process_group_id path param is unspecified",
status_code=400,
)
unmodified_process_group_id = _un_modify_modified_process_model_id(
modified_process_group_id
)
process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
if process_group is None:
raise ApiError(
error_code="process_model_could_not_be_created",
message=f"Process Model could not be created from given body because Process Group could not be found: {body}",
status_code=400,
)
process_model_info = ProcessModelInfo(**body_filtered) # type: ignore
if process_model_info is None:
raise ApiError(
error_code="process_model_could_not_be_created",
message=f"Process Model could not be created from given body: {body}",
status_code=400,
)
ProcessModelService.add_process_model(process_model_info)
_commit_and_push_to_git(
f"User: {g.user.username} created process model {process_model_info.id}"
)
return Response(
json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
status=201,
mimetype="application/json",
)
def process_model_delete(
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_model_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
ProcessModelService().process_model_delete(process_model_identifier)
_commit_and_push_to_git(
f"User: {g.user.username} deleted process model {process_model_identifier}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_model_update(
modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> Any:
"""Process_model_update."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
body_include_list = [
"display_name",
"primary_file_name",
"primary_process_id",
"description",
"metadata_extraction_paths",
]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
process_model = _get_process_model(process_model_identifier)
ProcessModelService.update_process_model(process_model, body_filtered)
_commit_and_push_to_git(
f"User: {g.user.username} updated process model {process_model_identifier}"
)
return ProcessModelInfoSchema().dump(process_model)
def process_model_show(modified_process_model_identifier: str) -> Any:
"""Process_model_show."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
files = sorted(
SpecFileService.get_files(process_model),
key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
)
process_model.files = files
for file in process_model.files:
file.references = SpecFileService.get_references_for_file(file, process_model)
process_model.parent_groups = ProcessModelService.get_parent_group_array(
process_model.id
)
return make_response(jsonify(process_model), 200)
def process_model_move(
modified_process_model_identifier: str, new_location: str
) -> flask.wrappers.Response:
"""Process_model_move."""
original_process_model_id = _un_modify_modified_process_model_id(
modified_process_model_identifier
)
new_process_model = ProcessModelService().process_model_move(
original_process_model_id, new_location
)
_commit_and_push_to_git(
f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
)
return make_response(jsonify(new_process_model), 200)
def process_model_publish(
modified_process_model_identifier: str, branch_to_update: Optional[str] = None
) -> flask.wrappers.Response:
"""Process_model_publish."""
if branch_to_update is None:
branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
if branch_to_update is None:
raise MissingGitConfigsError(
"Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
"This is required for publishing process models"
)
process_model_identifier = _un_modify_modified_process_model_id(
modified_process_model_identifier
)
pr_url = GitService().publish(process_model_identifier, branch_to_update)
data = {"ok": True, "pr_url": pr_url}
return Response(json.dumps(data), status=200, mimetype="application/json")
def process_model_list(
process_group_identifier: Optional[str] = None,
recursive: Optional[bool] = False,
filter_runnable_by_user: Optional[bool] = False,
page: int = 1,
per_page: int = 100,
) -> flask.wrappers.Response:
"""Process model list!"""
process_models = ProcessModelService.get_process_models(
process_group_id=process_group_identifier,
recursive=recursive,
filter_runnable_by_user=filter_runnable_by_user,
)
batch = ProcessModelService().get_batch(
process_models, page=page, per_page=per_page
)
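# ceiling-division page count: e.g. 205 models with per_page=100 gives 2 full pages plus a remainder page, so 3 pages total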
pages = len(process_models) // per_page
remainder = len(process_models) % per_page
if remainder > 0:
pages += 1
response_json = {
"results": ProcessModelInfoSchema(many=True).dump(batch),
"pagination": {
"count": len(batch),
"total": len(process_models),
"pages": pages,
},
}
return Response(json.dumps(response_json), status=200, mimetype="application/json")
def process_model_file_update(
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_update."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
request_file = _get_file_from_request()
request_file_contents = request_file.stream.read()
if not request_file_contents:
raise ApiError(
error_code="file_contents_empty",
message="Given request file does not have any content",
status_code=400,
)
SpecFileService.update_file(process_model, file_name, request_file_contents)
_commit_and_push_to_git(
f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_model_file_delete(
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
try:
SpecFileService.delete_file(process_model, file_name)
except FileNotFoundError as exception:
raise (
ApiError(
error_code="process_model_file_cannot_be_found",
message=f"Process model file cannot be found: {file_name}",
status_code=400,
)
) from exception
_commit_and_push_to_git(
f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_model_file_create(
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_model_file_create."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
request_file = _get_file_from_request()
if not request_file.filename:
raise ApiError(
error_code="could_not_get_filename",
message="Could not get filename from request",
status_code=400,
)
file = SpecFileService.add_file(
process_model, request_file.filename, request_file.stream.read()
)
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
_commit_and_push_to_git(
f"User: {g.user.username} added process model file {process_model_identifier}/{file.name}"
)
return Response(
json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
)
def process_model_file_show(
modified_process_model_identifier: str, file_name: str
) -> Any:
"""Process_model_file_show."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
files = SpecFileService.get_files(process_model, file_name)
if len(files) == 0:
raise ApiError(
error_code="unknown file",
message=f"No information exists for file {file_name}"
f" it does not exist in workflow {process_model_identifier}.",
status_code=404,
)
file = files[0]
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
# file.process_group_id = process_model.process_group_id
return FileSchema().dump(file)
def _get_file_from_request() -> Any:
"""Get_file_from_request."""
request_file = connexion.request.files.get("file")
if not request_file:
raise ApiError(
error_code="no_file_given",
message="Given request does not contain a file",
status_code=400,
)
return request_file

View File

@ -0,0 +1,131 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import random
import string
from typing import Dict
from typing import Union
import flask.wrappers
from flask import current_app
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from lxml import etree # type: ignore
from lxml.builder import ElementMaker # type: ignore
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
_get_required_parameter_or_raise,
)
from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
from spiffworkflow_backend.services.spec_file_service import SpecFileService
def script_unit_test_create(
modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
"""Script_unit_test_create."""
bpmn_task_identifier = _get_required_parameter_or_raise(
"bpmn_task_identifier", body
)
input_json = _get_required_parameter_or_raise("input_json", body)
expected_output_json = _get_required_parameter_or_raise(
"expected_output_json", body
)
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
files = SpecFileService.get_files(process_model, process_model.primary_file_name)
if len(files) == 0:
raise ApiError(
error_code="cannot_find_file",
message=f"Could not find the primary bpmn file for process_model: {process_model.id}",
status_code=404,
)
file = files[0]
# TODO: move this to an xml service or something
file_contents = SpecFileService.get_data(process_model, file.name)
bpmn_etree_element = etree.fromstring(file_contents)
nsmap = bpmn_etree_element.nsmap
spiff_element_maker = ElementMaker(
namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
)
script_task_elements = bpmn_etree_element.xpath(
f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
)
if len(script_task_elements) == 0:
raise ApiError(
error_code="missing_script_task",
message=f"Cannot find a script task with id: {bpmn_task_identifier}",
status_code=404,
)
script_task_element = script_task_elements[0]
extension_elements = None
extension_elements_array = script_task_element.xpath(
".//bpmn:extensionElements",
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
)
if len(extension_elements_array) == 0:
bpmn_element_maker = ElementMaker(
namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
)
extension_elements = bpmn_element_maker("extensionElements")
script_task_element.append(extension_elements)
else:
extension_elements = extension_elements_array[0]
unit_test_elements = None
unit_test_elements_array = extension_elements.xpath(
"//spiffworkflow:unitTests",
namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"},
)
if len(unit_test_elements_array) == 0:
unit_test_elements = spiff_element_maker("unitTests")
extension_elements.append(unit_test_elements)
else:
unit_test_elements = unit_test_elements_array[0]
fuzz = "".join(
random.choice(string.ascii_uppercase + string.digits) # noqa: S311
for _ in range(7)
)
unit_test_id = f"unit_test_{fuzz}"
input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
expected_output_json_element = spiff_element_maker(
"expectedOutputJson", json.dumps(expected_output_json)
)
unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
unit_test_element.append(input_json_element)
unit_test_element.append(expected_output_json_element)
unit_test_elements.append(unit_test_element)
SpecFileService.update_file(
process_model, file.name, etree.tostring(bpmn_etree_element)
)
return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
def script_unit_test_run(
modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
"""Script_unit_test_run."""
# FIXME: We should probably clear this somewhere else but this works
current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
current_app.config["THREAD_LOCAL_DATA"].spiff_step = None
python_script = _get_required_parameter_or_raise("python_script", body)
input_json = _get_required_parameter_or_raise("input_json", body)
expected_output_json = _get_required_parameter_or_raise(
"expected_output_json", body
)
result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
python_script, input_json, expected_output_json
)
return make_response(jsonify(result), 200)

View File

@ -0,0 +1,67 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Dict
from typing import Optional
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from spiffworkflow_backend.models.secret_model import SecretModel
from spiffworkflow_backend.models.secret_model import SecretModelSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.user_service import UserService
def secret_show(key: str) -> Optional[str]:
"""Secret_show."""
return SecretService.get_secret(key)
def secret_list(
page: int = 1,
per_page: int = 100,
) -> Response:
"""Secret_list."""
secrets = (
SecretModel.query.order_by(SecretModel.key)
.join(UserModel)
.add_columns(
UserModel.username,
)
.paginate(page=page, per_page=per_page, error_out=False)
)
response_json = {
"results": secrets.items,
"pagination": {
"count": len(secrets.items),
"total": secrets.total,
"pages": secrets.pages,
},
}
return make_response(jsonify(response_json), 200)
def secret_create(body: Dict) -> Response:
"""Add secret."""
secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
return Response(
json.dumps(SecretModelSchema().dump(secret_model)),
status=201,
mimetype="application/json",
)
def secret_update(key: str, body: dict) -> Response:
"""Update secret."""
SecretService().update_secret(key, body["value"], g.user.id)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def secret_delete(key: str) -> Response:
"""Delete secret."""
current_user = UserService.current_user()
SecretService.delete_secret(key, current_user.id)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

View File

@ -0,0 +1,49 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import flask.wrappers
import werkzeug
from flask import current_app
from flask import g
from flask import redirect
from flask import request
from flask.wrappers import Response
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.service_task_service import ServiceTaskService
def service_task_list() -> flask.wrappers.Response:
"""Service_task_list."""
available_connectors = ServiceTaskService.available_connectors()
return Response(
json.dumps(available_connectors), status=200, mimetype="application/json"
)
def authentication_list() -> flask.wrappers.Response:
"""Authentication_list."""
available_authentications = ServiceTaskService.authentication_list()
response_json = {
"results": available_authentications,
"connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"],
"redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
}
return Response(json.dumps(response_json), status=200, mimetype="application/json")
def authentication_callback(
service: str,
auth_method: str,
) -> werkzeug.wrappers.Response:
"""Authentication_callback."""
verify_token(request.args.get("token"), force_run=True)
response = request.args["response"]
SecretService().update_secret(
f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
)
return redirect(
f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration"
)

View File

@ -0,0 +1,525 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import os
import uuid
from typing import Any
from typing import Dict
from typing import Optional
from typing import TypedDict
from typing import Union
import flask.wrappers
import jinja2
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
from sqlalchemy import asc
from sqlalchemy import desc
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
_find_principal_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import (
_find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
class TaskDataSelectOption(TypedDict):
"""TaskDataSelectOption."""
value: str
label: str
class ReactJsonSchemaSelectOption(TypedDict):
"""ReactJsonSchemaSelectOption."""
type: str
title: str
enum: list[str]
# TODO: see comment for before_request
# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Task_list_my_tasks."""
principal = _find_principal_or_raise()
human_tasks = (
HumanTaskModel.query.order_by(desc(HumanTaskModel.id)) # type: ignore
.join(ProcessInstanceModel)
.join(HumanTaskUserModel)
.filter_by(user_id=principal.user_id)
.filter(HumanTaskModel.completed == False) # noqa: E712
# add_columns is only needed to expose process_model_identifier, but once it is used every other column we care about has to be added back explicitly.
.add_columns(
ProcessInstanceModel.process_model_identifier,
ProcessInstanceModel.process_model_display_name,
ProcessInstanceModel.status,
HumanTaskModel.task_name,
HumanTaskModel.task_title,
HumanTaskModel.task_type,
HumanTaskModel.task_status,
HumanTaskModel.task_id,
HumanTaskModel.id,
HumanTaskModel.process_model_display_name,
HumanTaskModel.process_instance_id,
)
.paginate(page=page, per_page=per_page, error_out=False)
)
tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items]
response_json = {
"results": tasks,
"pagination": {
"count": len(human_tasks.items),
"total": human_tasks.total,
"pages": human_tasks.pages,
},
}
return make_response(jsonify(response_json), 200)
def task_list_for_my_open_processes(
page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Task_list_for_my_open_processes."""
return _get_tasks(page=page, per_page=per_page)
def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Task_list_for_me."""
return _get_tasks(
processes_started_by_user=False,
has_lane_assignment_id=False,
page=page,
per_page=per_page,
)
def task_list_for_my_groups(
user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Task_list_for_my_groups."""
return _get_tasks(
user_group_identifier=user_group_identifier,
processes_started_by_user=False,
page=page,
per_page=per_page,
)
def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
"""Task_show."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
if process_instance.status == ProcessInstanceStatus.suspended.value:
raise ApiError(
error_code="error_suspended",
message="The process instance is suspended",
status_code=400,
)
process_model = _get_process_model(
process_instance.process_model_identifier,
)
form_schema_file_name = ""
form_ui_schema_file_name = ""
spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance)
extensions = spiff_task.task_spec.extensions
if "properties" in extensions:
properties = extensions["properties"]
if "formJsonSchemaFilename" in properties:
form_schema_file_name = properties["formJsonSchemaFilename"]
if "formUiSchemaFilename" in properties:
form_ui_schema_file_name = properties["formUiSchemaFilename"]
processor = ProcessInstanceProcessor(process_instance)
task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
task.data = spiff_task.data
task.process_model_display_name = process_model.display_name
task.process_model_identifier = process_model.id
process_model_with_form = process_model
refs = SpecFileService.get_references_for_process(process_model_with_form)
all_processes = [i.identifier for i in refs]
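# if the task's process is not defined in this process model (e.g. it came from a call
# activity in another model), resolve the model that owns that bpmn file so the form
# schema files can be looked up relative to it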
if task.process_identifier not in all_processes:
bpmn_file_full_path = (
ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
task.process_identifier
)
)
relative_path = os.path.relpath(
bpmn_file_full_path, start=FileSystemService.root_path()
)
process_model_relative_path = os.path.dirname(relative_path)
process_model_with_form = (
ProcessModelService.get_process_model_from_relative_path(
process_model_relative_path
)
)
if task.type == "User Task":
if not form_schema_file_name:
raise (
ApiError(
error_code="missing_form_file",
message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}",
status_code=400,
)
)
form_contents = _prepare_form_data(
form_schema_file_name,
task.data,
process_model_with_form,
)
try:
# form_contents is a str
form_dict = json.loads(form_contents)
except Exception as exception:
raise (
ApiError(
error_code="error_loading_form",
message=f"Could not load form schema from: {form_schema_file_name}. Error was: {str(exception)}",
status_code=400,
)
) from exception
if task.data:
_update_form_schema_with_task_data_as_needed(form_dict, task.data)
if form_contents:
task.form_schema = form_dict
if form_ui_schema_file_name:
ui_form_contents = _prepare_form_data(
form_ui_schema_file_name,
task.data,
process_model_with_form,
)
if ui_form_contents:
task.form_ui_schema = ui_form_contents
if task.properties and task.data and "instructionsForEndUser" in task.properties:
if task.properties["instructionsForEndUser"]:
task.properties["instructionsForEndUser"] = _render_jinja_template(
task.properties["instructionsForEndUser"], task.data
)
return make_response(jsonify(task), 200)
def process_data_show(
process_instance_id: int,
process_data_identifier: str,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_data_show."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
processor = ProcessInstanceProcessor(process_instance)
all_process_data = processor.get_data()
process_data_value = None
if process_data_identifier in all_process_data:
process_data_value = all_process_data[process_data_identifier]
return make_response(
jsonify(
{
"process_data_identifier": process_data_identifier,
"process_data_value": process_data_value,
}
),
200,
)
def task_submit(
process_instance_id: int,
task_id: str,
body: Dict[str, Any],
terminate_loop: bool = False,
) -> flask.wrappers.Response:
"""Task_submit_user_data."""
principal = _find_principal_or_raise()
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
if not process_instance.can_submit_task():
raise ApiError(
error_code="process_instance_not_runnable",
message=f"Process Instance ({process_instance.id}) has status "
f"{process_instance.status} which does not allow tasks to be submitted.",
status_code=400,
)
processor = ProcessInstanceProcessor(process_instance)
spiff_task = _get_spiff_task_from_process_instance(
task_id, process_instance, processor=processor
)
AuthorizationService.assert_user_can_complete_spiff_task(
process_instance.id, spiff_task, principal.user
)
if spiff_task.state != TaskState.READY:
raise (
ApiError(
error_code="invalid_state",
message="You may not update a task unless it is in the READY state.",
status_code=400,
)
)
if terminate_loop and spiff_task.is_looping():
spiff_task.terminate_loop()
human_task = HumanTaskModel.query.filter_by(
process_instance_id=process_instance_id, task_id=task_id, completed=False
).first()
if human_task is None:
raise (
ApiError(
error_code="no_human_task",
message="Cannot find an human task with task id '{task_id}' for process instance {process_instance_id}.",
status_code=500,
)
)
ProcessInstanceService.complete_form_task(
processor=processor,
spiff_task=spiff_task,
data=body,
user=g.user,
human_task=human_task,
)
# If we need to update all tasks, then get the next ready task and, if it is a multi-instance with the same
# task spec, complete that form as well.
# if update_all:
# last_index = spiff_task.task_info()["mi_index"]
# next_task = processor.next_task()
# while next_task and next_task.task_info()["mi_index"] > last_index:
# __update_task(processor, next_task, form_data, user)
# last_index = next_task.task_info()["mi_index"]
# next_task = processor.next_task()
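# after completing the task, return the next ready human task assigned to this user (if any) so the frontend can go straight to it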
next_human_task_assigned_to_me = (
HumanTaskModel.query.filter_by(
process_instance_id=process_instance_id, completed=False
)
.order_by(asc(HumanTaskModel.id)) # type: ignore
.join(HumanTaskUserModel)
.filter_by(user_id=principal.user_id)
.first()
)
if next_human_task_assigned_to_me:
return make_response(
jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200
)
return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
def _get_tasks(
processes_started_by_user: bool = True,
has_lane_assignment_id: bool = True,
page: int = 1,
per_page: int = 100,
user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Get_tasks."""
user_id = g.user.id
# use distinct to ensure we only get one row per human task otherwise
# we can get back multiple for the same human task row which throws off
# pagination later on
# https://stackoverflow.com/q/34582014/6090676
human_tasks_query = (
HumanTaskModel.query.distinct()
.outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
.join(ProcessInstanceModel)
.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
.filter(HumanTaskModel.completed == False) # noqa: E712
)
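# "started by me" tasks: instances this user initiated; the outer join to HumanTaskUserModel
# keeps tasks not assigned to the user while still flagging the ones that are (see the
# current_user_is_potential_owner column below). Otherwise only tasks actually assigned
# to the user are returned via an inner join.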
if processes_started_by_user:
human_tasks_query = human_tasks_query.filter(
ProcessInstanceModel.process_initiator_id == user_id
).outerjoin(
HumanTaskUserModel,
and_(
HumanTaskUserModel.user_id == user_id,
HumanTaskModel.id == HumanTaskUserModel.human_task_id,
),
)
else:
human_tasks_query = human_tasks_query.filter(
ProcessInstanceModel.process_initiator_id != user_id
).join(
HumanTaskUserModel,
and_(
HumanTaskUserModel.user_id == user_id,
HumanTaskModel.id == HumanTaskUserModel.human_task_id,
),
)
if has_lane_assignment_id:
if user_group_identifier:
human_tasks_query = human_tasks_query.filter(
GroupModel.identifier == user_group_identifier
)
else:
human_tasks_query = human_tasks_query.filter(
HumanTaskModel.lane_assignment_id.is_not(None) # type: ignore
)
else:
human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore
human_tasks = (
human_tasks_query.add_columns(
ProcessInstanceModel.process_model_identifier,
ProcessInstanceModel.status.label("process_instance_status"), # type: ignore
ProcessInstanceModel.updated_at_in_seconds,
ProcessInstanceModel.created_at_in_seconds,
UserModel.username,
GroupModel.identifier.label("user_group_identifier"),
HumanTaskModel.task_name,
HumanTaskModel.task_title,
HumanTaskModel.process_model_display_name,
HumanTaskModel.process_instance_id,
HumanTaskUserModel.user_id.label("current_user_is_potential_owner"),
)
.order_by(desc(HumanTaskModel.id)) # type: ignore
.paginate(page=page, per_page=per_page, error_out=False)
)
response_json = {
"results": human_tasks.items,
"pagination": {
"count": len(human_tasks.items),
"total": human_tasks.total,
"pages": human_tasks.pages,
},
}
return make_response(jsonify(response_json), 200)
def _prepare_form_data(
form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo
) -> str:
"""Prepare_form_data."""
if task_data is None:
return ""
file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
return _render_jinja_template(file_contents, task_data)
def _render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str:
"""Render_jinja_template."""
jinja_environment = jinja2.Environment(
autoescape=True, lstrip_blocks=True, trim_blocks=True
)
template = jinja_environment.from_string(unprocessed_template)
return template.render(**data)
def _get_spiff_task_from_process_instance(
task_id: str,
process_instance: ProcessInstanceModel,
processor: Union[ProcessInstanceProcessor, None] = None,
) -> SpiffTask:
"""Get_spiff_task_from_process_instance."""
if processor is None:
processor = ProcessInstanceProcessor(process_instance)
task_uuid = uuid.UUID(task_id)
spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
if spiff_task is None:
raise (
ApiError(
error_code="empty_task",
message="Processor failed to obtain task.",
status_code=500,
)
)
return spiff_task
# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
def _update_form_schema_with_task_data_as_needed(
in_dict: dict, task_data: dict
) -> None:
"""Update_nested."""
for k, value in in_dict.items():
if "anyOf" == k:
# value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"]
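# for example, with task data {"awesome_options": [{"value": "a", "label": "Option A"}]},
# the ["options_from_task_data_var:awesome_options"] entry gets replaced below with
# [{"type": "string", "enum": ["a"], "title": "Option A"}]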
if isinstance(value, list):
if len(value) == 1:
first_element_in_value_list = value[0]
if isinstance(first_element_in_value_list, str):
if first_element_in_value_list.startswith(
"options_from_task_data_var:"
):
task_data_var = first_element_in_value_list.replace(
"options_from_task_data_var:", ""
)
if task_data_var not in task_data:
raise (
ApiError(
error_code="missing_task_data_var",
message=f"Task data is missing variable: {task_data_var}",
status_code=500,
)
)
select_options_from_task_data = task_data.get(task_data_var)
if isinstance(select_options_from_task_data, list):
if all(
"value" in d and "label" in d
for d in select_options_from_task_data
):
def map_function(
task_data_select_option: TaskDataSelectOption,
) -> ReactJsonSchemaSelectOption:
"""Map_function."""
return {
"type": "string",
"enum": [task_data_select_option["value"]],
"title": task_data_select_option["label"],
}
options_for_react_json_schema_form = list(
map(map_function, select_options_from_task_data)
)
in_dict[k] = options_for_react_json_schema_form
elif isinstance(value, dict):
_update_form_schema_with_task_data_as_needed(value, task_data)
elif isinstance(value, list):
for o in value:
if isinstance(o, dict):
_update_form_schema_with_task_data_as_needed(o, task_data)

View File

@ -2604,6 +2604,7 @@ class TestProcessApi(BaseTest):
f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?all_tasks=true",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
end = next(task for task in response.json if task["name"] == "End")
assert end["data"]["result"] == {"message": "message 1"}

View File

@ -0,0 +1,45 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
# see also: npx cypress run --env grep="can filter",grepFilterSpecs=true
# https://github.com/cypress-io/cypress/tree/develop/npm/grep#pre-filter-specs-grepfilterspecs
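# each rg match looks something like "cypress/e2e/process_models.cy.js:  it('can create a new model', () => {"
# (spec file path, then the it() line; the path and test name here are just illustrative)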
test_case_matches="$(rg '^ it\(')"
stats_file="/var/tmp/cypress_stats.txt"
function run_all_test_cases() {
local stat_index="$1"
pushd "$NO_TERM_LIMITS_PROJECTS_DIR/github/sartography/sample-process-models"
gitc
popd
while read -r test_case_line; do
test_case_file="$(awk -F: '{print $1}' <<< "$test_case_line")"
test_case_name_side="$(awk -F: '{print $2}' <<< "$test_case_line")"
test_case_name=$(hot_sed -E "s/^\s+it\('(.+)'.*/\1/" <<< "$test_case_name_side")
echo "running test case: $test_case_file::$test_case_name"
if ./node_modules/.bin/cypress run --e2e --browser chrome --spec "$test_case_file" --env grep="$test_case_name"; then
echo "$stat_index:::$test_case_file:::$test_case_name: PASS" >> "$stats_file"
else
echo "$stat_index:::$test_case_file:::$test_case_name: FAIL" >> "$stats_file"
fi
done <<< "$test_case_matches"
}
# clear the stats file
echo > "$stats_file"
for global_stat_index in {1..100}; do
run_all_test_cases "$global_stat_index"
done
# prints summary of most-failing test cases
grep FAIL "$stats_file" | awk -F ':::' '{for (i=2; i<NF; i++) printf $i " "; print $NF}' | sort | uniq -c | sort -n

View File

@ -50,7 +50,7 @@ describe('process-models', () => {
cy.contains(modelDisplayName).should('not.exist');
});
it('can create new bpmn, dmn, and json files', () => {
it('can create new bpmn and dmn and json files', () => {
const uuid = () => Cypress._.random(0, 1e6);
const id = uuid();
const directParentGroupId = 'acceptance-tests-group-one';
@ -142,6 +142,9 @@ describe('process-models', () => {
);
cy.contains(modelId).should('not.exist');
cy.contains(modelDisplayName).should('not.exist');
// we go back to the parent process group after deleting the model
cy.get('.tile-process-group-content-container').should('exist');
});
it('can upload and run a bpmn file', () => {

View File

@ -120,6 +120,6 @@ describe('tasks', () => {
kickOffModelWithForm();
cy.navigateToHome();
cy.basicPaginationTest();
cy.basicPaginationTest('process-instance-show-link');
});
});

View File

@ -116,30 +116,33 @@ Cypress.Commands.add(
}
);
Cypress.Commands.add('basicPaginationTest', () => {
cy.getBySel('pagination-options').scrollIntoView();
cy.get('.cds--select__item-count').find('.cds--select-input').select('2');
Cypress.Commands.add(
'basicPaginationTest',
(dataQaTagToUseToEnsureTableHasLoaded = 'paginated-entity-id') => {
cy.getBySel('pagination-options').scrollIntoView();
cy.get('.cds--select__item-count').find('.cds--select-input').select('2');
// NOTE: this is an em dash instead of an en dash
cy.contains(/\b1—2 of \d+/);
// NOTE: this is an em dash instead of an en dash
cy.contains(/\b1—2 of \d+/);
// ok, trying to ensure that we have everything loaded before we leave this
// function and try to sign out. Just showing results 1-2 of blah is not good enough,
// since the ajax request may not have finished yet.
// to be sure it's finished, grab the log id from page 1. remember it.
// then use the magical contains command that waits for the element to exist AND
// for that element to contain the text we're looking for.
cy.getBySel('paginated-entity-id')
.first()
.then(($element) => {
const oldId = $element.text().trim();
cy.get('.cds--pagination__button--forward').click();
cy.contains(/\b3—4 of \d+/);
cy.get('.cds--pagination__button--backward').click();
cy.contains(/\b1—2 of \d+/);
cy.contains('[data-qa=paginated-entity-id]', oldId);
});
});
// ok, trying to ensure that we have everything loaded before we leave this
// function and try to sign out. Just showing results 1-2 of blah is not good enough,
// since the ajax request may not have finished yet.
// to be sure it's finished, grab the log id from page 1. remember it.
// then use the magical contains command that waits for the element to exist AND
// for that element to contain the text we're looking for.
cy.getBySel(dataQaTagToUseToEnsureTableHasLoaded)
.first()
.then(($element) => {
const oldId = $element.text().trim();
cy.get('.cds--pagination__button--forward').click();
cy.contains(/\b3—4 of \d+/);
cy.get('.cds--pagination__button--backward').click();
cy.contains(/\b1—2 of \d+/);
cy.contains(`[data-qa=${dataQaTagToUseToEnsureTableHasLoaded}]`, oldId);
});
}
);
Cypress.Commands.add('assertAtLeastOneItemInPaginatedResults', () => {
cy.contains(/\b[1-9]\d*—[1-9]\d* of [1-9]\d*/);