Merge remote-tracking branch 'origin/main' into feature/call-activity-references

jasquat 2023-04-24 16:17:32 -04:00
commit 60684126af
33 changed files with 744 additions and 385 deletions

View File

@@ -276,7 +276,7 @@ jobs:
uses: codecov/codecov-action@v3.1.3
- name: SonarCloud Scan
uses: sonarsource/sonarcloud-github-action@v1.8
uses: sonarsource/sonarcloud-github-action@v1.9
# thought about just skipping dependabot
# if: ${{ github.actor != 'dependabot[bot]' }}
# but figured all pull requests seems better, since none of them will have access to sonarcloud.

View File

@@ -21,10 +21,13 @@ from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.specs.base import TaskSpec # type: ignore
from SpiffWorkflow.task import Task # type: ignore
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
from spiffworkflow_backend.services.authentication_service import TokenInvalidError
from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
from spiffworkflow_backend.services.task_service import TaskModelException
from spiffworkflow_backend.services.task_service import TaskService
api_error_blueprint = Blueprint("api_error_blueprint", __name__)
@@ -36,17 +39,17 @@ class ApiError(Exception):
error_code: str
message: str
error_line: str = ""
error_type: str = ""
file_name: str = ""
line_number: int = 0
offset: int = 0
error_line: str | None = ""
error_type: str | None = ""
file_name: str | None = ""
line_number: int | None = 0
offset: int | None = 0
sentry_link: str | None = None
status_code: int = 400
tag: str = ""
status_code: int | None = 400
tag: str | None = ""
task_data: dict | str | None = field(default_factory=dict)
task_id: str = ""
task_name: str = ""
task_id: str | None = ""
task_name: str | None = ""
task_trace: list | None = field(default_factory=list)
def __str__(self) -> str:
@@ -96,6 +99,44 @@ class ApiError(Exception):
return instance
@classmethod
def from_task_model(
cls,
error_code: str,
message: str,
task_model: TaskModel,
status_code: int | None = 400,
line_number: int | None = 0,
offset: int | None = 0,
error_type: str | None = "",
error_line: str | None = "",
task_trace: list | None = None,
) -> ApiError:
"""Constructs an API Error with details pulled from the current task model."""
instance = cls(error_code, message, status_code=status_code)
task_definition = task_model.task_definition
instance.task_id = task_definition.bpmn_identifier
instance.task_name = task_definition.bpmn_name or ""
instance.line_number = line_number
instance.offset = offset
instance.error_type = error_type
instance.error_line = error_line
if task_trace:
instance.task_trace = task_trace
else:
instance.task_trace = TaskModelException.get_task_trace(task_model)
try:
spec_reference = TaskService.get_spec_reference_from_bpmn_process(task_model.bpmn_process)
instance.file_name = spec_reference.file_name
except Exception as exception:
current_app.logger.error(exception)
# Assure that there is nothing in the json data that can't be serialized.
instance.task_data = ApiError.remove_unserializeable_from_dict(task_model.get_data())
return instance
@staticmethod
def remove_unserializeable_from_dict(my_dict: dict) -> dict:
"""Removes unserializeable from dict."""
@@ -157,6 +198,18 @@ class ApiError(Exception):
error_line=exp.error_line,
task_trace=exp.task_trace,
)
elif isinstance(exp, TaskModelException):
# Note that WorkflowDataExceptions are also WorkflowTaskExceptions
return ApiError.from_task_model(
error_code,
message + ". " + str(exp),
exp.task_model,
line_number=exp.line_number,
offset=exp.offset,
error_type=exp.error_type,
error_line=exp.error_line,
task_trace=exp.task_trace,
)
elif isinstance(exp, WorkflowException) and exp.task_spec:
msg = message + ". " + str(exp)
return ApiError.from_task_spec(error_code, msg, exp.task_spec)
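A note on the dataclass changes above: widening fields like line_number and status_code to "int | None" lets from_task_model accept and forward values that may legitimately be None. A minimal runnable sketch of the pattern, using a hypothetical stand-in rather than the real ApiError:

from dataclasses import dataclass, field

@dataclass
class ApiErrorSketch:  # hypothetical stand-in for ApiError
    error_code: str
    message: str
    line_number: int | None = 0
    status_code: int | None = 400
    task_trace: list | None = field(default_factory=list)

# None is now a valid value for the widened fields
err = ApiErrorSketch("task_error", "something broke", line_number=None)
assert err.status_code == 400 and err.line_number is None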

View File

@@ -76,6 +76,19 @@ class TaskModel(SpiffworkflowBaseDBModel):
data: Optional[dict] = None
# these are here to be compatible with task api
form_schema: Optional[dict] = None
form_ui_schema: Optional[dict] = None
process_model_display_name: Optional[str] = None
process_model_identifier: Optional[str] = None
typename: Optional[str] = None
can_complete: Optional[bool] = None
extensions: Optional[dict] = None
name_for_display: Optional[str] = None
def get_data(self) -> dict:
return {**self.python_env_data(), **self.json_data()}
def python_env_data(self) -> dict:
return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash)
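TaskModel.get_data merges the python environment data and the JSON data with dict unpacking, so json_data wins on key collisions. A tiny runnable sketch of that merge semantics, with made-up values:

python_env_data = {"a": 1, "shared": "env"}
json_data = {"b": 2, "shared": "json"}
merged = {**python_env_data, **json_data}  # later unpacking overrides earlier keys
assert merged == {"a": 1, "b": 2, "shared": "json"}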

View File

@@ -43,7 +43,6 @@ from spiffworkflow_backend.models.process_instance import (
)
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
@@ -54,6 +53,8 @@ from spiffworkflow_backend.routes.process_api_blueprint import (
)
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import HumanTaskNotFoundError
from spiffworkflow_backend.services.authorization_service import UserDoesNotHaveAccessToTaskError
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
@@ -64,8 +65,10 @@ from spiffworkflow_backend.services.process_instance_queue_service import (
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskModelException
from spiffworkflow_backend.services.task_service import TaskService
@@ -217,7 +220,7 @@ def task_data_update(
)
if json_data_dict is not None:
TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid
)
try:
@@ -265,7 +268,6 @@ def manual_complete_task(
def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappers.Response:
"""Task_show."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
if process_instance.status == ProcessInstanceStatus.suspended.value:
@@ -279,17 +281,12 @@ def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappers.Response:
process_instance.process_model_identifier,
)
# _find_human_task_or_raise(process_instance_id, task_guid)
form_schema_file_name = ""
form_ui_schema_file_name = ""
processor = ProcessInstanceProcessor(process_instance)
if task_guid == "next":
spiff_task = processor.next_task()
task_guid = spiff_task.id
else:
spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
extensions = spiff_task.task_spec.extensions
task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
task_definition = task_model.task_definition
extensions = TaskService.get_extensions_from_task_model(task_model)
if "properties" in extensions:
properties = extensions["properties"]
@@ -297,25 +294,40 @@ def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappers.Response:
form_schema_file_name = properties["formJsonSchemaFilename"]
if "formUiSchemaFilename" in properties:
form_ui_schema_file_name = properties["formUiSchemaFilename"]
task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
task.data = spiff_task.data
task.process_model_display_name = process_model.display_name
task.process_model_identifier = process_model.id
can_complete = False
try:
AuthorizationService.assert_user_can_complete_task(
process_instance.id, task_definition.bpmn_identifier, g.user
)
can_complete = True
except HumanTaskNotFoundError:
can_complete = False
except UserDoesNotHaveAccessToTaskError:
can_complete = False
task_model.data = task_model.get_data()
task_model.process_model_display_name = process_model.display_name
task_model.process_model_identifier = process_model.id
task_model.typename = task_definition.typename
task_model.can_complete = can_complete
task_process_identifier = task_model.bpmn_process.bpmn_process_definition.bpmn_identifier
task_model.name_for_display = TaskService.get_name_for_display(task_definition)
process_model_with_form = process_model
refs = SpecFileService.get_references_for_process(process_model_with_form)
all_processes = [i.identifier for i in refs]
if task.process_identifier not in all_processes:
top_process_name = processor.find_process_model_process_name_by_task_name(task.process_identifier)
if task_process_identifier not in all_processes:
top_bpmn_process = TaskService.bpmn_process_for_called_activity_or_top_level_process(task_model)
bpmn_file_full_path = ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
top_process_name
top_bpmn_process.bpmn_process_definition.bpmn_identifier
)
relative_path = os.path.relpath(bpmn_file_full_path, start=FileSystemService.root_path())
process_model_relative_path = os.path.dirname(relative_path)
process_model_with_form = ProcessModelService.get_process_model_from_relative_path(process_model_relative_path)
if task.type == "User Task":
if task_definition.typename == "UserTask":
if not form_schema_file_name:
raise (
ApiError(
@@ -330,79 +342,60 @@ def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappers.Response:
form_dict = _prepare_form_data(
form_schema_file_name,
spiff_task,
task_model,
process_model_with_form,
)
if task.data:
_update_form_schema_with_task_data_as_needed(form_dict, task, spiff_task)
if task_model.data:
_update_form_schema_with_task_data_as_needed(form_dict, task_model)
if form_dict:
task.form_schema = form_dict
task_model.form_schema = form_dict
if form_ui_schema_file_name:
ui_form_contents = _prepare_form_data(
form_ui_schema_file_name,
task,
task_model,
process_model_with_form,
)
if ui_form_contents:
task.form_ui_schema = ui_form_contents
task_model.form_ui_schema = ui_form_contents
_munge_form_ui_schema_based_on_hidden_fields_in_task_data(task)
_render_instructions_for_end_user(spiff_task, task)
return make_response(jsonify(task), 200)
_munge_form_ui_schema_based_on_hidden_fields_in_task_data(task_model)
_render_instructions_for_end_user(task_model, extensions)
task_model.extensions = extensions
return make_response(jsonify(task_model), 200)
def _render_instructions_for_end_user(spiff_task: SpiffTask, task: Task) -> str:
def _render_instructions_for_end_user(task_model: TaskModel, extensions: Optional[dict] = None) -> str:
"""Assure any instructions for end user are processed for jinja syntax."""
if task.properties and "instructionsForEndUser" in task.properties:
if task.properties["instructionsForEndUser"]:
if extensions is None:
extensions = TaskService.get_extensions_from_task_model(task_model)
if extensions and "instructionsForEndUser" in extensions:
if extensions["instructionsForEndUser"]:
try:
instructions = _render_jinja_template(task.properties["instructionsForEndUser"], spiff_task)
task.properties["instructionsForEndUser"] = instructions
instructions = _render_jinja_template(extensions["instructionsForEndUser"], task_model)
extensions["instructionsForEndUser"] = instructions
return instructions
except WorkflowTaskException as wfe:
except TaskModelException as wfe:
wfe.add_note("Failed to render instructions for end user.")
raise ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe) from wfe
return ""
def process_data_show(
process_instance_id: int,
process_data_identifier: str,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_data_show."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
processor = ProcessInstanceProcessor(process_instance)
all_process_data = processor.get_data()
process_data_value = None
if process_data_identifier in all_process_data:
process_data_value = all_process_data[process_data_identifier]
return make_response(
jsonify(
{
"process_data_identifier": process_data_identifier,
"process_data_value": process_data_value,
}
),
200,
)
def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[str, Optional[str], None]:
processor = ProcessInstanceProcessor(process_instance)
reported_ids = [] # bit of an issue with end tasks showing as getting completed twice.
spiff_task = processor.next_task()
task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
last_task = None
while last_task != spiff_task:
task = ProcessInstanceService.spiff_task_to_api_task(processor, processor.next_task())
instructions = _render_instructions_for_end_user(spiff_task, task)
extensions = TaskService.get_extensions_from_task_model(task_model)
instructions = _render_instructions_for_end_user(task_model, extensions)
if instructions and spiff_task.id not in reported_ids:
reported_ids.append(spiff_task.id)
task.properties = extensions
yield f"data: {current_app.json.dumps(task)} \n\n"
last_task = spiff_task
try:
@@ -425,6 +418,7 @@ def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
# Note, this has to be done in case someone leaves the page,
# which can otherwise cancel this function and leave completed tasks un-registered.
spiff_task = processor.next_task()
task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
# Always provide some response, in the event no instructions were provided.
if len(reported_ids) == 0:
@@ -432,10 +426,16 @@ def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
yield f"data: {current_app.json.dumps(task)} \n\n"
def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
with ProcessInstanceQueueService.dequeued(process_instance):
yield from _interstitial_stream(process_instance)
def interstitial(process_instance_id: int) -> Response:
"""A Server Side Events Stream for watching the execution of engine tasks."""
return Response(
stream_with_context(_interstitial_stream(process_instance_id)),
stream_with_context(_dequeued_interstitial_stream(process_instance_id)),
mimetype="text/event-stream",
headers={"X-Accel-Buffering": "no"},
)
@@ -461,7 +461,7 @@ def _task_submit_shared(
processor = ProcessInstanceProcessor(process_instance)
spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user)
AuthorizationService.assert_user_can_complete_task(process_instance.id, spiff_task.task_spec.name, principal.user)
if spiff_task.state != TaskState.READY:
raise (
@@ -649,14 +649,14 @@ def _get_tasks(
return make_response(jsonify(response_json), 200)
def _prepare_form_data(form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo) -> dict:
def _prepare_form_data(form_file: str, task_model: TaskModel, process_model: ProcessModelInfo) -> dict:
"""Prepare_form_data."""
if spiff_task.data is None:
if task_model.data is None:
return {}
file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
try:
form_contents = _render_jinja_template(file_contents, spiff_task)
form_contents = _render_jinja_template(file_contents, task_model)
try:
# form_contents is a str
hot_dict: dict = json.loads(form_contents)
@@ -669,21 +669,21 @@ def _prepare_form_data(form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo) -> dict:
status_code=400,
)
) from exception
except WorkflowTaskException as wfe:
except TaskModelException as wfe:
wfe.add_note(f"Error in Json Form File '{form_file}'")
api_error = ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe)
api_error.file_name = form_file
raise api_error
def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> str:
def _render_jinja_template(unprocessed_template: str, task_model: TaskModel) -> str:
"""Render_jinja_template."""
jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
try:
template = jinja_environment.from_string(unprocessed_template)
return template.render(**spiff_task.data)
return template.render(**(task_model.get_data()))
except jinja2.exceptions.TemplateError as template_error:
wfe = WorkflowTaskException(str(template_error), task=spiff_task, exception=template_error)
wfe = TaskModelException(str(template_error), task_model=task_model, exception=template_error)
if isinstance(template_error, TemplateSyntaxError):
wfe.line_number = template_error.lineno
wfe.error_line = template_error.source.split("\n")[template_error.lineno - 1]
@@ -691,7 +691,7 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> str:
raise wfe from template_error
except Exception as error:
_type, _value, tb = exc_info()
wfe = WorkflowTaskException(str(error), task=spiff_task, exception=error)
wfe = TaskModelException(str(error), task_model=task_model, exception=error)
while tb:
if tb.tb_frame.f_code.co_filename == "<template>":
wfe.line_number = tb.tb_lineno
@@ -724,9 +724,9 @@ def _get_spiff_task_from_process_instance(
# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task, spiff_task: SpiffTask) -> None:
def _update_form_schema_with_task_data_as_needed(in_dict: dict, task_model: TaskModel) -> None:
"""Update_nested."""
if task.data is None:
if task_model.data is None:
return None
for k, value in in_dict.items():
@@ -739,25 +739,18 @@ def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task, spiff_task: SpiffTask) -> None:
if first_element_in_value_list.startswith("options_from_task_data_var:"):
task_data_var = first_element_in_value_list.replace("options_from_task_data_var:", "")
if task_data_var not in task.data:
wte = WorkflowTaskException(
(
"Error building form. Attempting to create a"
" selection list with options from variable"
f" '{task_data_var}' but it doesn't exist in"
" the Task Data."
),
task=spiff_task,
if task_data_var not in task_model.data:
message = (
"Error building form. Attempting to create a selection list with options from"
f" variable '{task_data_var}' but it doesn't exist in the Task Data."
)
raise (
ApiError.from_workflow_exception(
error_code="missing_task_data_var",
message=str(wte),
exp=wte,
)
raise ApiError(
error_code="missing_task_data_var",
message=message,
status_code=500,
)
select_options_from_task_data = task.data.get(task_data_var)
select_options_from_task_data = task_model.data.get(task_data_var)
if isinstance(select_options_from_task_data, list):
if all("value" in d and "label" in d for d in select_options_from_task_data):
@@ -777,11 +770,11 @@ def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task, spiff_task: SpiffTask) -> None:
in_dict[k] = options_for_react_json_schema_form
elif isinstance(value, dict):
_update_form_schema_with_task_data_as_needed(value, task, spiff_task)
_update_form_schema_with_task_data_as_needed(value, task_model)
elif isinstance(value, list):
for o in value:
if isinstance(o, dict):
_update_form_schema_with_task_data_as_needed(o, task, spiff_task)
_update_form_schema_with_task_data_as_needed(o, task_model)
def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
@@ -826,15 +819,15 @@ def _find_human_task_or_raise(
return human_task
def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
if task.form_ui_schema is None:
task.form_ui_schema = {}
def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task_model: TaskModel) -> None:
if task_model.form_ui_schema is None:
task_model.form_ui_schema = {}
if task.data and "form_ui_hidden_fields" in task.data:
hidden_fields = task.data["form_ui_hidden_fields"]
if task_model.data and "form_ui_hidden_fields" in task_model.data:
hidden_fields = task_model.data["form_ui_hidden_fields"]
for hidden_field in hidden_fields:
hidden_field_parts = hidden_field.split(".")
relevant_depth_of_ui_schema = task.form_ui_schema
relevant_depth_of_ui_schema = task_model.form_ui_schema
for ii, hidden_field_part in enumerate(hidden_field_parts):
if hidden_field_part not in relevant_depth_of_ui_schema:
relevant_depth_of_ui_schema[hidden_field_part] = {}
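_update_form_schema_with_task_data_as_needed above supports a convention where a schema value of "options_from_task_data_var:<name>" is replaced with a selection list built from task data, each entry needing "value" and "label" keys. The exact react-jsonschema-form output shape is elided in this diff, so the runnable sketch below uses an assumed, simplified mapping:

form_schema = {"enum": ["options_from_task_data_var:fruits"]}
task_data = {"fruits": [{"value": "apple", "label": "Apple"}, {"value": "pear", "label": "Pear"}]}

first_element = form_schema["enum"][0]
if first_element.startswith("options_from_task_data_var:"):
    task_data_var = first_element.replace("options_from_task_data_var:", "")
    options = task_data.get(task_data_var, [])
    if all("value" in d and "label" in d for d in options):
        # assumed mapping; the commit builds options_for_react_json_schema_form here
        form_schema["enum"] = [o["value"] for o in options]

assert form_schema["enum"] == ["apple", "pear"]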

View File

@@ -17,7 +17,6 @@ from flask import current_app
from flask import g
from flask import request
from flask import scaffold
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from sqlalchemy import or_
from sqlalchemy import text
@@ -66,9 +65,15 @@ class PermissionToAssign:
target_uri: str
# you can explicitly call out the CRUD actions you want to permit. These include: ["create", "read", "update", "delete"]
# if you hate typing, you can instead specify "all". If you do this, you might think it would grant access to
# ["create", "read", "update", "delete"] for everything. instead, we do this cute thing where we, as the API authors,
# understand that not all verbs are relevant for all API paths. For example, you cannot create logs over the API at this juncture,
# so for /logs, only "read" is relevant. When you ask for /logs, "all", we give you read.
# the relevant permissions are the only API methods that are currently available for each path prefix.
# if we add further API methods, we'll need to evaluate whether they should be added here.
PATH_SEGMENTS_FOR_PERMISSION_ALL = [
{"path": "/event-error-details", "relevant_permissions": ["read"]},
{"path": "/logs", "relevant_permissions": ["read"]},
{
"path": "/process-instances",
@@ -412,27 +417,26 @@ class AuthorizationService:
) from exception
@staticmethod
def assert_user_can_complete_spiff_task(
def assert_user_can_complete_task(
process_instance_id: int,
spiff_task: SpiffTask,
task_bpmn_identifier: str,
user: UserModel,
) -> bool:
"""Assert_user_can_complete_spiff_task."""
human_task = HumanTaskModel.query.filter_by(
task_name=spiff_task.task_spec.name,
task_name=task_bpmn_identifier,
process_instance_id=process_instance_id,
completed=False,
).first()
if human_task is None:
raise HumanTaskNotFoundError(
f"Could find an human task with task name '{spiff_task.task_spec.name}'"
f"Could find an human task with task name '{task_bpmn_identifier}'"
f" for process instance '{process_instance_id}'"
)
if user not in human_task.potential_owners:
raise UserDoesNotHaveAccessToTaskError(
f"User {user.username} does not have access to update"
f" task'{spiff_task.task_spec.name}' for process instance"
f" task'{task_bpmn_identifier}' for process instance"
f" '{process_instance_id}'"
)
return True
@@ -543,6 +547,7 @@ class AuthorizationService:
f"/process-instances/for-me/{process_related_path_segment}",
f"/logs/{process_related_path_segment}",
f"/process-data-file-download/{process_related_path_segment}",
f"/event-error-details/{process_related_path_segment}",
]:
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri=target_uri))
else:
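As the rewritten comment explains, requesting "all" for a path grants only the verbs that path actually supports. A small runnable sketch of that lookup; expand_all and the "/widgets" path are hypothetical, only the two list entries come from the commit:

PATH_SEGMENTS_FOR_PERMISSION_ALL = [
    {"path": "/event-error-details", "relevant_permissions": ["read"]},
    {"path": "/logs", "relevant_permissions": ["read"]},
]

def expand_all(path: str) -> list[str]:
    for segment in PATH_SEGMENTS_FOR_PERMISSION_ALL:
        if segment["path"] == path:
            return segment["relevant_permissions"]
    # paths without a restricted entry get full CRUD
    return ["create", "read", "update", "delete"]

assert expand_all("/logs") == ["read"]
assert expand_all("/widgets") == ["create", "read", "update", "delete"]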

View File

@@ -96,6 +96,7 @@ from spiffworkflow_backend.services.element_units_service import (
)
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -858,114 +859,6 @@ class ProcessInstanceProcessor:
db.session.add(pim)
db.session.commit()
# FIXME: Better to move to SpiffWorkflow and traverse the outer_workflows on the spiff_task
# We may need to add whether a subprocess is a call activity or a subprocess in order to do it properly
def get_all_processes_with_task_name_list(self) -> dict[str, list[str]]:
"""Gets the list of processes pointing to a list of task names.
This is useful for figuring out which processes contain which tasks.
Returns: {process_name: [task_1, task_2, ...], ...}
"""
bpmn_definition_dict = self.full_bpmn_process_dict
processes: dict[str, list[str]] = {bpmn_definition_dict["spec"]["name"]: []}
for task_name, _task_spec in bpmn_definition_dict["spec"]["task_specs"].items():
processes[bpmn_definition_dict["spec"]["name"]].append(task_name)
if "subprocess_specs" in bpmn_definition_dict:
for subprocess_name, subprocess_details in bpmn_definition_dict["subprocess_specs"].items():
processes[subprocess_name] = []
if "task_specs" in subprocess_details:
for task_name, _task_spec in subprocess_details["task_specs"].items():
processes[subprocess_name].append(task_name)
return processes
def find_process_model_process_name_by_task_name(
self, task_name: str, processes: Optional[dict[str, list[str]]] = None
) -> str:
"""Gets the top level process of a process model using the task name that the process contains.
For example, process_modelA has processA which has a call activity that calls processB which is inside of process_modelB.
processB has subprocessA which has taskA. Using taskA this method should return processB and then that can be used with
the spec reference cache to find process_modelB.
"""
process_name_to_return = task_name
if processes is None:
processes = self.get_all_processes_with_task_name_list()
for process_name, task_spec_names in processes.items():
if task_name in task_spec_names:
process_name_to_return = self.find_process_model_process_name_by_task_name(process_name, processes)
return process_name_to_return
#################################################################
def get_all_task_specs(self) -> dict[str, dict]:
"""This looks both at top level task_specs and subprocess_specs in the serialized data.
It returns a dict of all task specs based on the task name like it is in the serialized form.
NOTE: this may not fully work for tasks that are NOT call activities since their task_name may not be unique
but in our current use case we only care about the call activities here.
"""
bpmn_definition_dict = self.full_bpmn_process_dict
spiff_task_json = bpmn_definition_dict["spec"]["task_specs"] or {}
if "subprocess_specs" in bpmn_definition_dict:
for _subprocess_name, subprocess_details in bpmn_definition_dict["subprocess_specs"].items():
if "task_specs" in subprocess_details:
spiff_task_json = spiff_task_json | subprocess_details["task_specs"]
return spiff_task_json
def get_subprocesses_by_child_task_ids(self) -> Tuple[dict, dict]:
"""Get all subprocess ids based on the child task ids.
This is useful when trying to link the child task of a call activity back to
the call activity that called it to get the appropriate data. For example, if you
have a call activity "Log" that you call twice within the same process, the Hammer log file
activity within the Log process will get called twice. They will potentially have different
task data. We want to be able to differentiate those two activities.
subprocess structure in the json:
"subprocesses": { [subprocess_task_id]: "tasks" : { [task_id]: [bpmn_task_details] }}
Also note that subprocess_task_id might in fact be a call activity, because spiff treats
call activities like subprocesses in terms of the serialization.
"""
process_instance_data_dict = self.full_bpmn_process_dict
spiff_task_json = self.get_all_task_specs()
subprocesses_by_child_task_ids = {}
task_typename_by_task_id = {}
if "subprocesses" in process_instance_data_dict:
for subprocess_id, subprocess_details in process_instance_data_dict["subprocesses"].items():
for task_id, task_details in subprocess_details["tasks"].items():
subprocesses_by_child_task_ids[task_id] = subprocess_id
task_name = task_details["task_spec"]
if task_name in spiff_task_json:
task_typename_by_task_id[task_id] = spiff_task_json[task_name]["typename"]
return (subprocesses_by_child_task_ids, task_typename_by_task_id)
def get_highest_level_calling_subprocesses_by_child_task_ids(
self, subprocesses_by_child_task_ids: dict, task_typename_by_task_id: dict
) -> dict:
"""Ensure task ids point to the top level subprocess id.
This is done by checking if a subprocess is also a task until the subprocess is no longer a task or a Call Activity.
"""
for task_id, subprocess_id in subprocesses_by_child_task_ids.items():
if subprocess_id in subprocesses_by_child_task_ids:
current_subprocess_id_for_task = subprocesses_by_child_task_ids[task_id]
if current_subprocess_id_for_task in task_typename_by_task_id:
# a call activity is like the top-level subprocess since it is the calling subprocess
# according to spiff and the top-level calling subprocess is really what we care about
if task_typename_by_task_id[current_subprocess_id_for_task] == "CallActivity":
continue
subprocesses_by_child_task_ids[task_id] = subprocesses_by_child_task_ids[subprocess_id]
self.get_highest_level_calling_subprocesses_by_child_task_ids(
subprocesses_by_child_task_ids, task_typename_by_task_id
)
return subprocesses_by_child_task_ids
def _store_bpmn_process_definition(
self,
process_bpmn_properties: dict,
@@ -1304,7 +1197,9 @@ class ProcessInstanceProcessor:
db.session.bulk_save_objects(new_task_models.values())
TaskService.insert_or_update_json_data_records(new_json_data_dicts)
TaskService.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id)
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, event_type, task_guid=task_id
)
self.save()
# Saving the workflow seems to reset the status
self.suspend()
@@ -1317,7 +1212,7 @@ class ProcessInstanceProcessor:
def reset_process(cls, process_instance: ProcessInstanceModel, to_task_guid: str) -> None:
"""Reset a process to an earlier state."""
# raise Exception("This feature to reset a process instance to a given task is currently unavaiable")
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid
)
@@ -1797,10 +1692,10 @@ class ProcessInstanceProcessor:
# If there are no ready tasks, but the thing isn't complete yet, find the first non-complete task
# and return that
next_task = None
next_task_to_return = None
for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.NOT_FINISHED_MASK):
next_task = task
return next_task
next_task_to_return = task
return next_task_to_return
def completed_user_tasks(self) -> List[SpiffTask]:
"""Completed_user_tasks."""
@@ -1846,7 +1741,7 @@ class ProcessInstanceProcessor:
TaskService.update_json_data_dicts_using_list(json_data_dict_list, json_data_dict_mapping)
TaskService.insert_or_update_json_data_records(json_data_dict_mapping)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model,
ProcessInstanceEventType.task_completed.value,
task_guid=task_model.guid,
@@ -1950,7 +1845,7 @@ class ProcessInstanceProcessor:
self.save()
self.process_instance_model.status = "terminated"
db.session.add(self.process_instance_model)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, ProcessInstanceEventType.process_instance_terminated.value
)
db.session.commit()
@@ -1959,7 +1854,7 @@ class ProcessInstanceProcessor:
"""Suspend."""
self.process_instance_model.status = ProcessInstanceStatus.suspended.value
db.session.add(self.process_instance_model)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, ProcessInstanceEventType.process_instance_suspended.value
)
db.session.commit()
@@ -1968,7 +1863,7 @@ class ProcessInstanceProcessor:
"""Resume."""
self.process_instance_model.status = ProcessInstanceStatus.waiting.value
db.session.add(self.process_instance_model)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, ProcessInstanceEventType.process_instance_resumed.value
)
db.session.commit()
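For context on the big deletion above: the removed find_process_model_process_name_by_task_name walked a flat {process_name: [task_names]} map upward until a name no longer appeared as a task anywhere, yielding the top-level process of the process model that defines the task (the replacement lives in TaskService.bpmn_process_for_called_activity_or_top_level_process below). A toy runnable sketch of that recursion, with hypothetical names mirroring the docstring's example:

processes = {
    "processA": ["call_activity_task"],  # top level of process_modelA
    "processB": ["subprocessA"],         # top level of process_modelB, called by processA
    "subprocessA": ["taskA"],
}

def find_top_process(task_name: str) -> str:
    # whichever process contains the name becomes the next name to resolve
    for process_name, task_spec_names in processes.items():
        if task_name in task_spec_names:
            return find_top_process(process_name)
    return task_name

assert find_top_process("taskA") == "processB"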

View File

@@ -14,7 +14,7 @@ from spiffworkflow_backend.models.process_instance_queue import (
from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.task_service import TaskService
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError
@@ -103,7 +103,7 @@ class ProcessInstanceQueueService:
# these events are handled in the WorkflowExecutionService.
# that is, we don't need to add error_detail records here, etc.
if not isinstance(ex, WorkflowExecutionServiceError):
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.process_instance_error.value, exception=ex
)
db.session.commit()

View File

@@ -344,7 +344,7 @@ class ProcessInstanceService:
data: dict[str, Any],
user: UserModel,
) -> None:
AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, user)
AuthorizationService.assert_user_can_complete_task(process_instance.id, spiff_task.task_spec.name, user)
cls.save_file_data_and_replace_with_digest_references(
data,
process_instance.id,
@@ -442,8 +442,8 @@ class ProcessInstanceService:
# can complete it.
can_complete = False
try:
AuthorizationService.assert_user_can_complete_spiff_task(
processor.process_instance_model.id, spiff_task, g.user
AuthorizationService.assert_user_can_complete_task(
processor.process_instance_model.id, spiff_task.task_spec.name, g.user
)
can_complete = True
except HumanTaskNotFoundError:

View File

@@ -0,0 +1,79 @@
import time
import traceback
from typing import Optional
from typing import Tuple
from flask import g
from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
class ProcessInstanceTmpService:
"""Temporary service to hold methods that should eventually be moved to ProcessInstanceService.
These methods cannot live there due to circular import issues with the ProcessInstanceProcessor.
"""
# TODO: move to process_instance_service once we clean it and the processor up
@classmethod
def add_event_to_process_instance(
cls,
process_instance: ProcessInstanceModel,
event_type: str,
task_guid: Optional[str] = None,
user_id: Optional[int] = None,
exception: Optional[Exception] = None,
timestamp: Optional[float] = None,
add_to_db_session: Optional[bool] = True,
) -> Tuple[ProcessInstanceEventModel, Optional[ProcessInstanceErrorDetailModel]]:
if user_id is None and hasattr(g, "user") and g.user:
user_id = g.user.id
if timestamp is None:
timestamp = time.time()
process_instance_event = ProcessInstanceEventModel(
process_instance_id=process_instance.id, event_type=event_type, timestamp=timestamp, user_id=user_id
)
if task_guid:
process_instance_event.task_guid = task_guid
if add_to_db_session:
db.session.add(process_instance_event)
process_instance_error_detail = None
if exception is not None:
# truncate to avoid database errors on large values. We observed that text in mysql is 65K.
stacktrace = traceback.format_exc().split("\n")
message = str(exception)[0:1023]
task_line_number = None
task_line_contents = None
task_trace = None
task_offset = None
# check for the class name string for ApiError to avoid circular imports
if isinstance(exception, WorkflowTaskException) or (
exception.__class__.__name__ == "ApiError" and exception.error_code == "task_error" # type: ignore
):
task_line_number = exception.line_number # type: ignore
task_line_contents = exception.error_line[0:255] # type: ignore
task_trace = exception.task_trace # type: ignore
task_offset = exception.offset # type: ignore
process_instance_error_detail = ProcessInstanceErrorDetailModel(
process_instance_event=process_instance_event,
message=message,
stacktrace=stacktrace,
task_line_number=task_line_number,
task_line_contents=task_line_contents,
task_trace=task_trace,
task_offset=task_offset,
)
if add_to_db_session:
db.session.add(process_instance_error_detail)
return (process_instance_event, process_instance_error_detail)
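add_event_to_process_instance truncates error details before persisting so oversized values fit their columns (per the comment above about mysql text limits). A small runnable sketch of the slicing it applies; the slice lengths come from the code, the exception is invented:

exception = ValueError("x" * 5000)
message = str(exception)[0:1023]           # message column cap
task_line_contents = ("y" * 1000)[0:255]   # error_line slice cap
assert len(message) == 1023
assert len(task_line_contents) == 255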

View File

@@ -1,34 +1,36 @@
import copy
import json
import time
import traceback
from hashlib import sha256
from typing import Optional
from typing import Tuple
from typing import TypedDict
from typing import Union
from uuid import UUID
from flask import current_app
from flask import g
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.exceptions import WorkflowException # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.task import TaskStateNames
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
class StartAndEndTimes(TypedDict):
@@ -41,6 +43,71 @@ class JsonDataDict(TypedDict):
data: dict
class TaskModelException(Exception):
"""Copied from SpiffWorkflow.exceptions.WorkflowTaskException.
Reimplements the exception from SpiffWorkflow to not require a spiff_task.
"""
def __init__(
self,
error_msg: str,
task_model: TaskModel,
exception: Optional[Exception] = None,
line_number: Optional[int] = None,
offset: Optional[int] = None,
error_line: Optional[str] = None,
):
self.task_model = task_model
self.line_number = line_number
self.offset = offset
self.error_line = error_line
self.notes: list[str] = []
if exception:
self.error_type = exception.__class__.__name__
else:
self.error_type = "unknown"
if isinstance(exception, SyntaxError) and not line_number:
self.line_number = exception.lineno
self.offset = exception.offset
elif isinstance(exception, NameError):
self.add_note(
WorkflowException.did_you_mean_from_name_error(exception, list(task_model.get_data().keys()))
)
# If encountered in a sub-workflow, this traces back up the stack,
# so we can tell how we got to this particular task, no matter how
# deeply nested in sub-workflows it is. Takes the form of:
# task-description (file-name)
self.task_trace = self.get_task_trace(task_model)
def add_note(self, note: str) -> None:
self.notes.append(note)
def __str__(self) -> str:
"""Add notes to the error message."""
return super().__str__() + ". " + ". ".join(self.notes)
@classmethod
def get_task_trace(cls, task_model: TaskModel) -> list[str]:
task_definition = task_model.task_definition
task_bpmn_name = TaskService.get_name_for_display(task_definition)
bpmn_process = task_model.bpmn_process
spec_reference = TaskService.get_spec_reference_from_bpmn_process(bpmn_process)
task_trace = [f"{task_bpmn_name} ({spec_reference.file_name})"]
while bpmn_process.guid is not None:
caller_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first()
bpmn_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first()
spec_reference = TaskService.get_spec_reference_from_bpmn_process(bpmn_process)
task_trace.append(
f"{TaskService.get_name_for_display(caller_task_model.task_definition)} ({spec_reference.file_name})"
)
return task_trace
class TaskService:
PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state"
@@ -161,12 +228,14 @@ class TaskService:
if task_model.state == "COMPLETED":
event_type = ProcessInstanceEventType.task_completed.value
timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time()
process_instance_event, _process_instance_error_detail = TaskService.add_event_to_process_instance(
self.process_instance,
event_type,
task_guid=task_model.guid,
timestamp=timestamp,
add_to_db_session=False,
process_instance_event, _process_instance_error_detail = (
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance,
event_type,
task_guid=task_model.guid,
timestamp=timestamp,
add_to_db_session=False,
)
)
self.process_instance_events[task_model.guid] = process_instance_event
@@ -488,6 +557,19 @@ class TaskService:
setattr(task_model, task_model_data_column, task_data_hash)
return json_data_dict
@classmethod
def bpmn_process_for_called_activity_or_top_level_process(cls, task_model: TaskModel) -> BpmnProcessModel:
"""Returns either the bpmn process for the call activity calling the process or the top level bpmn process.
For example, process_modelA has processA which has a call activity that calls processB which is inside of process_modelB.
processB has subprocessA which has taskA. Using taskA this method should return processB and then that can be used with
the spec reference cache to find process_modelB.
"""
(bpmn_processes, _task_models) = TaskService.task_models_of_parent_bpmn_processes(
task_model, stop_on_first_call_activity=True
)
return bpmn_processes[0]
@classmethod
def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]:
bpmn_process_ids = [p.id for p in bpmn_processes]
@@ -500,27 +582,53 @@ class TaskService:
@classmethod
def task_models_of_parent_bpmn_processes(
cls, task_model: TaskModel
cls, task_model: TaskModel, stop_on_first_call_activity: Optional[bool] = False
) -> Tuple[list[BpmnProcessModel], list[TaskModel]]:
"""Returns the list of task models that are associated with the paren bpmn process.
Example: TopLevelProcess has SubprocessTaskA which has CallActivityTaskA which has ScriptTaskA.
SubprocessTaskA corresponds to SpiffSubprocess1.
CallActivityTaskA corresponds to SpiffSubprocess2.
Using ScriptTaskA this will return:
(
[TopLevelProcess, SpiffSubprocess1, SpiffSubprocess2],
[SubprocessTaskA, CallActivityTaskA]
)
If stop_on_first_call_activity it will stop when it reaches the first task model with a type of 'CallActivity'.
This will change the return value in the example to:
(
[SpiffSubprocess2],
[CallActivityTaskA]
)
"""
bpmn_process = task_model.bpmn_process
task_models: list[TaskModel] = []
bpmn_processes: list[BpmnProcessModel] = [bpmn_process]
if bpmn_process.guid is not None:
parent_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first()
if parent_task_model is not None:
b, t = cls.task_models_of_parent_bpmn_processes(parent_task_model)
return (bpmn_processes + b, [parent_task_model] + t)
task_models.append(parent_task_model)
if not stop_on_first_call_activity or parent_task_model.task_definition.typename != "CallActivity":
if parent_task_model is not None:
b, t = cls.task_models_of_parent_bpmn_processes(
parent_task_model, stop_on_first_call_activity=stop_on_first_call_activity
)
return (b + bpmn_processes, t + task_models)
return (bpmn_processes, task_models)
@classmethod
def full_bpmn_process_path(cls, bpmn_process: BpmnProcessModel) -> list[str]:
"""Returns a list of bpmn process identifiers pointing the given bpmn_process."""
bpmn_process_identifiers: list[str] = [bpmn_process.bpmn_process_definition.bpmn_identifier]
if bpmn_process.direct_parent_process_id is not None:
parent_bpmn_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first()
if parent_bpmn_process is not None:
# always prepend new identifiers since they come first in the path
bpmn_process_identifiers = cls.full_bpmn_process_path(parent_bpmn_process) + bpmn_process_identifiers
bpmn_process_identifiers: list[str] = []
if bpmn_process.guid:
task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first()
(
parent_bpmn_processes,
_task_models_of_parent_bpmn_processes,
) = TaskService.task_models_of_parent_bpmn_processes(task_model)
for parent_bpmn_process in parent_bpmn_processes:
bpmn_process_identifiers.append(parent_bpmn_process.bpmn_process_definition.bpmn_identifier)
bpmn_process_identifiers.append(bpmn_process.bpmn_process_definition.bpmn_identifier)
return bpmn_process_identifiers
@classmethod
@@ -594,60 +702,30 @@ class TaskService:
if json_data_dict is not None:
json_data_dicts[json_data_dict["hash"]] = json_data_dict
# TODO: move to process_instance_service once we clean it and the processor up
@classmethod
def add_event_to_process_instance(
cls,
process_instance: ProcessInstanceModel,
event_type: str,
task_guid: Optional[str] = None,
user_id: Optional[int] = None,
exception: Optional[Exception] = None,
timestamp: Optional[float] = None,
add_to_db_session: Optional[bool] = True,
) -> Tuple[ProcessInstanceEventModel, Optional[ProcessInstanceErrorDetailModel]]:
if user_id is None and hasattr(g, "user") and g.user:
user_id = g.user.id
if timestamp is None:
timestamp = time.time()
process_instance_event = ProcessInstanceEventModel(
process_instance_id=process_instance.id, event_type=event_type, timestamp=timestamp, user_id=user_id
def get_extensions_from_task_model(cls, task_model: TaskModel) -> dict:
task_definition = task_model.task_definition
extensions: dict = (
task_definition.properties_json["extensions"] if "extensions" in task_definition.properties_json else {}
)
if task_guid:
process_instance_event.task_guid = task_guid
return extensions
if add_to_db_session:
db.session.add(process_instance_event)
@classmethod
def get_spec_reference_from_bpmn_process(cls, bpmn_process: BpmnProcessModel) -> SpecReferenceCache:
"""Get the bpmn file for a given task model.
process_instance_error_detail = None
if exception is not None:
# truncate to avoid database errors on large values. We observed that text in mysql is 65K.
stacktrace = traceback.format_exc().split("\n")
message = str(exception)[0:1023]
task_line_number = None
task_line_contents = None
task_trace = None
task_offset = None
if isinstance(exception, WorkflowTaskException) or (
isinstance(exception, ApiError) and exception.error_code == "task_error"
):
task_line_number = exception.line_number
task_line_contents = exception.error_line[0:255]
task_trace = exception.task_trace
task_offset = exception.offset
process_instance_error_detail = ProcessInstanceErrorDetailModel(
process_instance_event=process_instance_event,
message=message,
stacktrace=stacktrace,
task_line_number=task_line_number,
task_line_contents=task_line_contents,
task_trace=task_trace,
task_offset=task_offset,
This involves several queries so avoid calling in a tight loop.
"""
bpmn_process_definition = bpmn_process.bpmn_process_definition
spec_reference: Optional[SpecReferenceCache] = SpecReferenceCache.query.filter_by(
identifier=bpmn_process_definition.bpmn_identifier, type="process"
).first()
if spec_reference is None:
raise SpecReferenceNotFoundError(
f"Could not find given process identifier in the cache: {bpmn_process_definition.bpmn_identifier}"
)
return spec_reference
if add_to_db_session:
db.session.add(process_instance_error_detail)
return (process_instance_event, process_instance_error_detail)
@classmethod
def get_name_for_display(cls, entity: Union[TaskDefinitionModel, BpmnProcessDefinitionModel]) -> str:
return entity.bpmn_name or entity.bpmn_identifier
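The new stop_on_first_call_activity flag on task_models_of_parent_bpmn_processes lets bpmn_process_for_called_activity_or_top_level_process stop at the calling CallActivity instead of climbing to the top-level process. A toy runnable sketch of the walk over in-memory data (no database; names follow the docstring's example, and the non-CallActivity typename is illustrative):

parents = {
    # child process -> (calling task, calling task typename, parent process)
    "SpiffSubprocess2": ("CallActivityTaskA", "CallActivity", "SpiffSubprocess1"),
    "SpiffSubprocess1": ("SubprocessTaskA", "SubWorkflowTask", "TopLevelProcess"),
}

def walk(process: str, stop_on_first_call_activity: bool = False) -> tuple[list[str], list[str]]:
    bpmn_processes, task_models = [process], []
    while process in parents:
        task_name, typename, parent = parents[process]
        task_models.insert(0, task_name)
        if stop_on_first_call_activity and typename == "CallActivity":
            break
        bpmn_processes.insert(0, parent)
        process = parent
    return (bpmn_processes, task_models)

assert walk("SpiffSubprocess2") == (
    ["TopLevelProcess", "SpiffSubprocess1", "SpiffSubprocess2"],
    ["SubprocessTaskA", "CallActivityTaskA"],
)
assert walk("SpiffSubprocess2", stop_on_first_call_activity=True) == (
    ["SpiffSubprocess2"],
    ["CallActivityTaskA"],
)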

View File

@@ -25,6 +25,7 @@ from spiffworkflow_backend.services.assertion_service import safe_assertion
from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.task_service import StartAndEndTimes
from spiffworkflow_backend.services.task_service import TaskService
@@ -395,7 +396,7 @@ class WorkflowExecutionService:
self.process_bpmn_messages()
self.queue_waiting_receive_messages()
except WorkflowTaskException as wte:
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model,
ProcessInstanceEventType.task_failed.value,
exception=wte,

View File

@@ -4,40 +4,88 @@
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1g3dpd7</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1g3dpd7" sourceRef="StartEvent_1" targetRef="do_nothing" />
<bpmn:sequenceFlow id="Flow_1g3dpd7" sourceRef="StartEvent_1" targetRef="level_2b_script_task" />
<bpmn:endEvent id="Event_18dla68">
<bpmn:documentation># Main Workflow
Hello {{my_other_var}}
</bpmn:documentation>
<bpmn:incoming>Flow_0l0w6u9</bpmn:incoming>
<bpmn:incoming>Flow_0wt4dbv</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0l0w6u9" sourceRef="do_nothing" targetRef="Event_18dla68" />
<bpmn:scriptTask id="do_nothing" name="Do Nothing">
<bpmn:scriptTask id="level_2b_script_task" name="level_2b_script_task">
<bpmn:incoming>Flow_1g3dpd7</bpmn:incoming>
<bpmn:outgoing>Flow_0l0w6u9</bpmn:outgoing>
<bpmn:outgoing>Flow_1mvoqe4</bpmn:outgoing>
<bpmn:script>a = 1</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1mvoqe4" sourceRef="level_2b_script_task" targetRef="level_2b_subprocess" />
<bpmn:subProcess id="level_2b_subprocess" name="level_2b_subprocess">
<bpmn:incoming>Flow_1mvoqe4</bpmn:incoming>
<bpmn:outgoing>Flow_0wt4dbv</bpmn:outgoing>
<bpmn:startEvent id="Event_0fpb33c">
<bpmn:outgoing>Flow_18nmqzh</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_18nmqzh" sourceRef="Event_0fpb33c" targetRef="level_2b_subprocess_script_task" />
<bpmn:endEvent id="Event_1x11xe3">
<bpmn:incoming>Flow_1srjuev</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1srjuev" sourceRef="level_2b_subprocess_script_task" targetRef="Event_1x11xe3" />
<bpmn:scriptTask id="level_2b_subprocess_script_task" name="level_2b_subprocess_script_task">
<bpmn:incoming>Flow_18nmqzh</bpmn:incoming>
<bpmn:outgoing>Flow_1srjuev</bpmn:outgoing>
<bpmn:script>z = 1</bpmn:script>
</bpmn:scriptTask>
</bpmn:subProcess>
<bpmn:sequenceFlow id="Flow_0wt4dbv" sourceRef="level_2b_subprocess" targetRef="Event_18dla68" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Level2b">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_18dla68_di" bpmnElement="Event_18dla68">
<dc:Bounds x="432" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1reqred_di" bpmnElement="do_nothing">
<bpmndi:BPMNShape id="Activity_1reqred_di" bpmnElement="level_2b_script_task">
<dc:Bounds x="260" y="77" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_18dla68_di" bpmnElement="Event_18dla68">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1u9mmh7_di" bpmnElement="level_2b_subprocess">
<dc:Bounds x="410" y="77" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1g3dpd7_di" bpmnElement="Flow_1g3dpd7">
<di:waypoint x="215" y="117" />
<di:waypoint x="260" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0l0w6u9_di" bpmnElement="Flow_0l0w6u9">
<bpmndi:BPMNEdge id="Flow_1mvoqe4_di" bpmnElement="Flow_1mvoqe4">
<di:waypoint x="360" y="117" />
<di:waypoint x="432" y="117" />
<di:waypoint x="410" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0wt4dbv_di" bpmnElement="Flow_0wt4dbv">
<di:waypoint x="510" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
<bpmndi:BPMNDiagram id="BPMNDiagram_14p97s9">
<bpmndi:BPMNPlane id="BPMNPlane_1qs3lh3" bpmnElement="level_2b_subprocess">
<bpmndi:BPMNShape id="Event_0fpb33c_di" bpmnElement="Event_0fpb33c">
<dc:Bounds x="332" y="212" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1x11xe3_di" bpmnElement="Event_1x11xe3">
<dc:Bounds x="572" y="212" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0oiioqq_di" bpmnElement="level_2b_subprocess_script_task">
<dc:Bounds x="420" y="190" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_18nmqzh_di" bpmnElement="Flow_18nmqzh">
<di:waypoint x="368" y="230" />
<di:waypoint x="420" y="230" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1srjuev_di" bpmnElement="Flow_1srjuev">
<di:waypoint x="520" y="230" />
<di:waypoint x="572" y="230" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>

View File

@@ -4,8 +4,8 @@
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1g3dpd7</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1g3dpd7" sourceRef="StartEvent_1" targetRef="do_nothing" />
<bpmn:sequenceFlow id="Flow_0qdgvah" sourceRef="do_nothing" targetRef="Event_18dla68" />
<bpmn:sequenceFlow id="Flow_1g3dpd7" sourceRef="StartEvent_1" targetRef="level_3_script_task" />
<bpmn:sequenceFlow id="Flow_0qdgvah" sourceRef="level_3_script_task" targetRef="Event_18dla68" />
<bpmn:endEvent id="Event_18dla68">
<bpmn:documentation># Main Workflow
Hello {{my_other_var}}
@@ -13,7 +13,7 @@ Hello {{my_other_var}}
</bpmn:documentation>
<bpmn:incoming>Flow_0qdgvah</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="do_nothing" name="Do Nothing">
<bpmn:scriptTask id="level_3_script_task" name="Do Nothing">
<bpmn:incoming>Flow_1g3dpd7</bpmn:incoming>
<bpmn:outgoing>Flow_0qdgvah</bpmn:outgoing>
<bpmn:script>a = 3</bpmn:script>
@@ -27,7 +27,7 @@ Hello {{my_other_var}}
<bpmndi:BPMNShape id="Event_18dla68_di" bpmnElement="Event_18dla68">
<dc:Bounds x="432" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1po21cu_di" bpmnElement="do_nothing">
<bpmndi:BPMNShape id="Activity_1po21cu_di" bpmnElement="level_3_script_task">
<dc:Bounds x="280" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1g3dpd7_di" bpmnElement="Flow_1g3dpd7">

View File

@@ -0,0 +1,11 @@
{
"description": "",
"display_name": "call activity with nested calls",
"display_order": 0,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"metadata_extraction_paths": null,
"primary_file_name": "call_activity_nested.bpmn",
"primary_process_id": "Level1"
}

View File

@@ -9,7 +9,7 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend import db
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.tasks_controller import _interstitial_stream
from spiffworkflow_backend.routes.tasks_controller import _dequeued_interstitial_stream
class TestForGoodErrors(BaseTest):
@@ -22,7 +22,7 @@ class TestForGoodErrors(BaseTest):
with_super_admin_user: UserModel,
) -> Any:
# Call this to ensure all engine steps are fully processed before we search for human tasks.
_interstitial_stream(process_instance_id)
_dequeued_interstitial_stream(process_instance_id)
"""Returns the next available user task for a given process instance, if possible."""
human_tasks = (

View File

@ -33,7 +33,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.tasks_controller import _interstitial_stream
from spiffworkflow_backend.routes.tasks_controller import _dequeued_interstitial_stream
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
@ -1630,7 +1630,7 @@ class TestProcessApi(BaseTest):
headers=self.logged_in_headers(with_super_admin_user),
)
# Call this to ensure all engine steps are fully processed.
_interstitial_stream(process_instance_id)
_dequeued_interstitial_stream(process_instance_id)
assert response.json is not None
assert response.json["next_task"] is not None
@ -1694,7 +1694,7 @@ class TestProcessApi(BaseTest):
# Rather than call the API and deal with the Server-Sent Events, call the loop directly and convert it to
# a list. It tests all of our code. No reason to test Flask's SSE support.
stream_results = _interstitial_stream(process_instance_id)
stream_results = _dequeued_interstitial_stream(process_instance_id)
results = list(stream_results)
# strip the "data:" prefix and convert remaining string to dict.
json_results = list(map(lambda x: json.loads(x[5:]), results)) # type: ignore
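# The comment above describes the pattern all of these assertions rely on; a
# minimal sketch, assuming a process_instance_id for a running instance is in
# scope as it is in these tests (the "state" key is taken from the assertions
# in this file):
import json

from spiffworkflow_backend.routes.tasks_controller import _dequeued_interstitial_stream

# Each yielded string looks like "data:{...}"; dropping the five-character
# "data:" prefix leaves plain JSON to decode.
results = list(_dequeued_interstitial_stream(process_instance_id))
json_results = [json.loads(result[5:]) for result in results]
assert json_results[-1]["state"] in ("READY", "COMPLETED")  # illustrative check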
@ -1717,7 +1717,7 @@ class TestProcessApi(BaseTest):
assert response.json is not None
# we should now be on a task that does not belong to the original user, and the interstitial page should know this.
results = list(_interstitial_stream(process_instance_id))
results = list(_dequeued_interstitial_stream(process_instance_id))
json_results = list(map(lambda x: json.loads(x[5:]), results)) # type: ignore
assert len(results) == 1
assert json_results[0]["state"] == "READY"
@ -1732,9 +1732,9 @@ class TestProcessApi(BaseTest):
)
# We should now be on the end task with a valid message, even after loading it many times.
list(_interstitial_stream(process_instance_id))
list(_interstitial_stream(process_instance_id))
results = list(_interstitial_stream(process_instance_id))
list(_dequeued_interstitial_stream(process_instance_id))
list(_dequeued_interstitial_stream(process_instance_id))
results = list(_dequeued_interstitial_stream(process_instance_id))
json_results = list(map(lambda x: json.loads(x[5:]), results)) # type: ignore
assert len(json_results) == 1
assert json_results[0]["state"] == "COMPLETED"

View File

@ -66,6 +66,11 @@ class TestGetAllPermissions(BaseTest):
"uri": "/process-data-file-download/hey:group:*",
"permissions": ["read"],
},
{
"group_identifier": "my_test_group",
"uri": "/event-error-details/hey:group:*",
"permissions": ["read"],
},
]
permissions = GetAllPermissions().run(script_attributes_context)
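# The entries above, and the exploded (uri, verb) tuples asserted in the next
# file, share one shape: a read-only endpoint prefix joined to a process group
# or model target. A rough sketch, assuming this illustrative subset of
# prefixes (the service's real table may differ):
READ_ONLY_PREFIXES = ["/event-error-details", "/logs", "/process-data-file-download"]

def read_permissions_for(target: str) -> list[tuple[str, str]]:
    # e.g. ("/event-error-details/some-process-group:some-process-model:*", "read")
    return [(f"{prefix}/{target}", "read") for prefix in READ_ONLY_PREFIXES]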

View File

@ -124,6 +124,7 @@ class TestAuthorizationService(BaseTest):
"""Test_explode_permissions_all_on_process_group."""
expected_permissions = sorted(
[
("/event-error-details/some-process-group:some-process-model:*", "read"),
("/logs/some-process-group:some-process-model:*", "read"),
("/process-data/some-process-group:some-process-model:*", "read"),
(
@ -173,6 +174,7 @@ class TestAuthorizationService(BaseTest):
) -> None:
"""Test_explode_permissions_start_on_process_group."""
expected_permissions = [
("/event-error-details/some-process-group:some-process-model:*", "read"),
(
"/logs/some-process-group:some-process-model:*",
"read",
@ -202,6 +204,7 @@ class TestAuthorizationService(BaseTest):
"""Test_explode_permissions_all_on_process_model."""
expected_permissions = sorted(
[
("/event-error-details/some-process-group:some-process-model/*", "read"),
("/logs/some-process-group:some-process-model/*", "read"),
(
"/process-data-file-download/some-process-group:some-process-model/*",
@ -247,6 +250,10 @@ class TestAuthorizationService(BaseTest):
) -> None:
"""Test_explode_permissions_start_on_process_model."""
expected_permissions = [
(
"/event-error-details/some-process-group:some-process-model/*",
"read",
),
(
"/logs/some-process-group:some-process-model/*",
"read",

View File

@ -253,7 +253,9 @@ class TestProcessInstanceProcessor(BaseTest):
processor = ProcessInstanceProcessor(process_instance)
# this task will be found within subprocesses
spiff_task = processor.__class__.get_task_by_bpmn_identifier("do_nothing", processor.bpmn_process_instance)
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
"level_3_script_task", processor.bpmn_process_instance
)
assert spiff_task is not None
assert spiff_task.state == TaskState.COMPLETED

View File

@ -2,7 +2,6 @@
import re
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -12,7 +11,6 @@ from spiffworkflow_backend.models.process_instance_metadata import (
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@ -29,12 +27,9 @@ class TestProcessModel(BaseTest):
def test_can_run_process_model_with_call_activities_when_in_same_process_model_directory(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
process_model = load_test_spec(
"test_group/call_activity_test",
# bpmn_file_name="call_activity_test.bpmn",
@ -49,12 +44,9 @@ class TestProcessModel(BaseTest):
def test_can_run_process_model_with_call_activities_when_not_in_same_directory(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
@ -80,12 +72,9 @@ class TestProcessModel(BaseTest):
def test_can_run_process_model_with_call_activities_when_process_identifier_is_not_in_database(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
@ -116,9 +105,7 @@ class TestProcessModel(BaseTest):
def test_extract_metadata(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
process_model = self.create_process_model_with_metadata()

View File

@ -1,10 +1,8 @@
"""Test_process_model_service."""
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@ -14,12 +12,8 @@ class TestProcessModelService(BaseTest):
def test_can_update_specified_attributes(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_update_specified_attributes."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
process_model = load_test_spec(
"test_group/hello_world",
bpmn_file_name="hello_world.bpmn",

View File

@ -0,0 +1,158 @@
from flask import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.task_service import TaskService
class TestTaskService(BaseTest):
def test_can_get_full_bpmn_process_path(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
f"test_group/{bpmn_file_name}",
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
assert process_instance.status == "complete"
bpmn_process_level_2b = (
BpmnProcessModel.query.join(BpmnProcessDefinitionModel)
.filter(BpmnProcessDefinitionModel.bpmn_identifier == "Level2b")
.first()
)
assert bpmn_process_level_2b is not None
full_bpmn_process_path = TaskService.full_bpmn_process_path(bpmn_process_level_2b)
assert full_bpmn_process_path == ["Level1", "Level2", "Level2b"]
bpmn_process_level_3 = (
BpmnProcessModel.query.join(BpmnProcessDefinitionModel)
.filter(BpmnProcessDefinitionModel.bpmn_identifier == "Level3")
.first()
)
assert bpmn_process_level_3 is not None
full_bpmn_process_path = TaskService.full_bpmn_process_path(bpmn_process_level_3)
assert full_bpmn_process_path == ["Level1", "Level2", "Level3"]
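# A rough sketch of the walk the two assertions above imply: gather each bpmn
# process's identifier while following parent pointers upward, then reverse so
# the top-level process comes first. The direct_parent_process_id attribute is
# an assumption for illustration; the real TaskService.full_bpmn_process_path
# may resolve parents differently.
def full_bpmn_process_path_sketch(bpmn_process: BpmnProcessModel) -> list[str]:
    path = []
    current: BpmnProcessModel | None = bpmn_process
    while current is not None:
        path.append(current.bpmn_process_definition.bpmn_identifier)
        parent_id = current.direct_parent_process_id  # assumed parent pointer
        current = BpmnProcessModel.query.get(parent_id) if parent_id else None
    return list(reversed(path))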
def test_task_models_of_parent_bpmn_processes_stop_on_first_call_activity(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
f"test_group/{bpmn_file_name}",
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
assert process_instance.status == "complete"
task_model_level_2b = (
TaskModel.query.join(TaskDefinitionModel)
.filter(TaskDefinitionModel.bpmn_identifier == "level_2b_subprocess_script_task")
.first()
)
assert task_model_level_2b is not None
(bpmn_processes, task_models) = TaskService.task_models_of_parent_bpmn_processes(
task_model_level_2b, stop_on_first_call_activity=True
)
assert len(bpmn_processes) == 2
assert len(task_models) == 2
assert bpmn_processes[0].bpmn_process_definition.bpmn_identifier == "Level2b"
assert task_models[0].task_definition.bpmn_identifier == "level2b_second_call"
task_model_level_3 = (
TaskModel.query.join(TaskDefinitionModel)
.filter(TaskDefinitionModel.bpmn_identifier == "level_3_script_task")
.first()
)
assert task_model_level_3 is not None
(bpmn_processes, task_models) = TaskService.task_models_of_parent_bpmn_processes(
task_model_level_3, stop_on_first_call_activity=True
)
assert len(bpmn_processes) == 1
assert len(task_models) == 1
assert bpmn_processes[0].bpmn_process_definition.bpmn_identifier == "Level3"
assert task_models[0].task_definition.bpmn_identifier == "level3"
def test_bpmn_process_for_called_activity_or_top_level_process(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
f"test_group/{bpmn_file_name}",
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
assert process_instance.status == "complete"
task_model_level_2b = (
TaskModel.query.join(TaskDefinitionModel)
.filter(TaskDefinitionModel.bpmn_identifier == "level_2b_subprocess_script_task")
.first()
)
assert task_model_level_2b is not None
bpmn_process = TaskService.bpmn_process_for_called_activity_or_top_level_process(task_model_level_2b)
assert bpmn_process is not None
assert bpmn_process.bpmn_process_definition.bpmn_identifier == "Level2b"
task_model_level_3 = (
TaskModel.query.join(TaskDefinitionModel)
.filter(TaskDefinitionModel.bpmn_identifier == "level_3_script_task")
.first()
)
assert task_model_level_3 is not None
bpmn_process = TaskService.bpmn_process_for_called_activity_or_top_level_process(task_model_level_3)
assert bpmn_process.bpmn_process_definition.bpmn_identifier == "Level3"
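# Taken together, the assertions in this file suggest (my reading, not a spec)
# that a task's owning process for display purposes is the nearest enclosing
# process entered via a call activity, or the top-level process when none
# intervenes; an embedded subprocess is not a boundary, which is why
# level_2b_subprocess_script_task maps to Level2b rather than to the
# subprocess itself.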

View File

@ -101,7 +101,7 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => {
// Intended to be run from the process model show page
Cypress.Commands.add(
'runPrimaryBpmnFile',
(expectAutoRedirectToHumanTask = false) => {
(expectAutoRedirectToHumanTask = false, returnToProcessModelShow = true) => {
// cy.getBySel('start-process-instance').click();
// click on button with text Start
cy.get('button')
@ -112,11 +112,12 @@ Cypress.Commands.add(
cy.url().should('include', `/tasks/`);
cy.contains('Task: ', { timeout: 30000 });
} else {
cy.contains(/Process Instance.*[kK]icked [oO]ff/);
cy.reload(true);
cy.contains('Process Model:').should('exist');
cy.contains(/Process Instance.*[kK]icked [oO]ff/).should('not.exist');
cy.getBySel('process-model-show-permissions-loaded').should('exist');
cy.url().should('include', `/interstitial`);
cy.contains('Status: Completed');
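// Optionally return to the process model show page so callers that
// assert against that page keep working.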
if (returnToProcessModelShow) {
cy.getBySel('process-model-breadcrumb-link').click();
cy.getBySel('process-model-show-permissions-loaded').should('exist');
}
}
}
);

View File

@ -8,7 +8,10 @@ export default function InstructionsForEndUser({ task }: any) {
}
let instructions =
'There are no additional instructions or information for this task.';
const { properties } = task;
let { properties } = task;
if (!properties) {
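// Some task payloads appear to provide extensions instead of properties;
// fall back so instructions still render.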
properties = task.extensions;
}
const { instructionsForEndUser } = properties;
if (instructionsForEndUser) {
instructions = instructionsForEndUser;

View File

@ -71,14 +71,20 @@ export default function ProcessBreadcrumb({ hotCrumbs }: OwnProps) {
if (crumb.linkLastItem) {
let apiBase = '/admin/process-groups';
let dataQaTag = '';
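// Tag process model links so tests can target them via
// data-qa="process-model-breadcrumb-link".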
if (crumb.entityType.startsWith('process-model')) {
apiBase = '/admin/process-models';
dataQaTag = 'process-model-breadcrumb-link';
}
const fullUrl = `${apiBase}/${modifyProcessIdentifierForPathParam(
processEntity.id
)}`;
breadcrumbs.push(
<BreadcrumbItem key={processEntity.id} href={fullUrl}>
<BreadcrumbItem
key={processEntity.id}
href={fullUrl}
data-qa={dataQaTag}
>
{processEntity.display_name}
</BreadcrumbItem>
);

View File

@ -30,6 +30,7 @@ import {
DATE_FORMAT_FOR_DISPLAY,
} from '../config';
import {
capitalizeFirstLetter,
convertDateAndTimeStringsToSeconds,
convertDateObjectToFormattedHoursMinutes,
convertSecondsToFormattedDateString,
@ -721,6 +722,9 @@ export default function ProcessInstanceListTable({
);
};
const formatProcessInstanceStatus = (_row: any, value: any) => {
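// e.g. "user_input_required" is displayed as "User input required".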
return capitalizeFirstLetter((value || '').replaceAll('_', ' '));
};
const processStatusSearch = () => {
return (
<MultiSelect
@ -734,7 +738,7 @@ export default function ProcessInstanceListTable({
setRequiresRefilter(true);
}}
itemToString={(item: any) => {
return item || '';
return formatProcessInstanceStatus(null, item);
}}
selectionFeedback="top-after-reopen"
selectedItems={processStatusSelection}
@ -1358,6 +1362,7 @@ export default function ProcessInstanceListTable({
id: formatProcessInstanceId,
process_model_identifier: formatProcessModelIdentifier,
process_model_display_name: formatProcessModelDisplayName,
status: formatProcessInstanceStatus,
start_in_seconds: formatSecondsForDisplay,
end_in_seconds: formatSecondsForDisplay,
updated_at_in_seconds: formatSecondsForDisplay,
@ -1411,7 +1416,7 @@ export default function ProcessInstanceListTable({
return getHeaderLabel((column as any).Header);
});
if (showActionsColumn) {
headers.push('Actions');
headers.push('Action');
}
const rows = processInstances.map((row: any) => {

View File

@ -41,6 +41,7 @@ export interface EventDefinition {
export interface Task {
id: number;
guid: string;
process_instance_id: number;
bpmn_identifier: string;
bpmn_name?: string;
bpmn_process_direct_parent_guid: string;
@ -52,6 +53,13 @@ export interface Task {
task_definition_properties_json: TaskDefinitionPropertiesJson;
event_definition?: EventDefinition;
process_model_display_name: string;
process_model_identifier: string;
name_for_display: string;
can_complete: boolean;
form_schema: any;
form_ui_schema: any;
}
export interface ProcessInstanceTask {

View File

@ -7,7 +7,6 @@ import MyTasks from './MyTasks';
import CompletedInstances from './CompletedInstances';
import CreateNewInstance from './CreateNewInstance';
import InProgressInstances from './InProgressInstances';
import ProcessInterstitial from './ProcessInterstitial';
export default function HomePageRoutes() {
const location = useLocation();
@ -56,10 +55,6 @@ export default function HomePageRoutes() {
<Route path="my-tasks" element={<MyTasks />} />
<Route path=":process_instance_id/:task_id" element={<TaskShow />} />
<Route path="grouped" element={<InProgressInstances />} />
<Route
path="process/:process_instance_id/interstitial"
element={<ProcessInterstitial />}
/>
<Route path="completed-instances" element={<CompletedInstances />} />
<Route path="create-new-instance" element={<CreateNewInstance />} />
</Routes>

View File

@ -213,6 +213,13 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) {
setEventErrorDetails(errorObject);
},
});
} else {
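// Without read access to the event-error-details endpoint, show a stub
// error object instead of calling the API.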
const notAuthorized: ProcessInstanceEventErrorDetail = {
id: 0,
message: 'You are not authorized to view error details',
stacktrace: [],
};
setEventErrorDetails(notAuthorized);
}
};

View File

@ -23,6 +23,8 @@ export default function ProcessInterstitial() {
}, []);
const { addError } = useAPIError();
const processInstanceShowPageBaseUrl = `/admin/process-instances/for-me/${params.modified_process_model_identifier}`;
useEffect(() => {
fetchEventSource(
`${BACKEND_BASE_URL}/tasks/${params.process_instance_id}`,
@ -139,7 +141,10 @@ export default function ProcessInterstitial() {
entityType: 'process-model-id',
linkLastItem: true,
},
[`Process Instance Id: ${lastTask.process_instance_id}`],
[
`Process Instance: ${params.process_instance_id}`,
`${processInstanceShowPageBaseUrl}/${params.process_instance_id}`,
],
]}
/>
<div style={{ display: 'flex', alignItems: 'center' }}>

View File

@ -6,7 +6,7 @@ export default function ProcessRoutes() {
return (
<Routes>
<Route
path=":process_model_identifier/:process_instance_id/interstitial"
path=":modified_process_model_identifier/:process_instance_id/interstitial"
element={<ProcessInterstitial />}
/>
</Routes>

View File

@ -18,7 +18,7 @@ import Form from '../themes/carbon';
import HttpService from '../services/HttpService';
import useAPIError from '../hooks/UseApiError';
import { modifyProcessIdentifierForPathParam } from '../helpers';
import { ProcessInstanceTask } from '../interfaces';
import { Task } from '../interfaces';
import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
import InstructionsForEndUser from '../components/InstructionsForEndUser';
@ -95,7 +95,7 @@ enum FormSubmitType {
}
export default function TaskShow() {
const [task, setTask] = useState<ProcessInstanceTask | null>(null);
const [task, setTask] = useState<Task | null>(null);
const [userTasks] = useState(null);
const params = useParams();
const navigate = useNavigate();
@ -105,7 +105,7 @@ export default function TaskShow() {
const { addError, removeError } = useAPIError();
const navigateToInterstitial = (myTask: ProcessInstanceTask) => {
const navigateToInterstitial = (myTask: Task) => {
navigate(
`/process/${modifyProcessIdentifierForPathParam(
myTask.process_model_identifier
@ -114,7 +114,7 @@ export default function TaskShow() {
};
useEffect(() => {
const processResult = (result: ProcessInstanceTask) => {
const processResult = (result: Task) => {
setTask(result);
setDisabled(false);
if (!result.can_complete) {
@ -206,7 +206,7 @@ export default function TaskShow() {
const taskUrl = `/tasks/${params.process_instance_id}/${userTask.id}`;
if (userTask.id === params.task_id) {
selectedTabIndex = index;
return <Tab selected>{userTask.title}</Tab>;
return <Tab selected>{userTask.name_for_display}</Tab>;
}
if (userTask.state === 'COMPLETED') {
return (
@ -214,12 +214,12 @@ export default function TaskShow() {
onClick={() => navigate(taskUrl)}
data-qa={`form-nav-${userTask.name}`}
>
{userTask.title}
{userTask.name_for_display}
</Tab>
);
}
if (userTask.state === 'FUTURE') {
return <Tab disabled>{userTask.title}</Tab>;
return <Tab disabled>{userTask.name_for_display}</Tab>;
}
if (userTask.state === 'READY') {
return (
@ -227,7 +227,7 @@ export default function TaskShow() {
onClick={() => navigate(taskUrl)}
data-qa={`form-nav-${userTask.name}`}
>
{userTask.title}
{userTask.name_for_display}
</Tab>
);
}
@ -297,7 +297,7 @@ export default function TaskShow() {
let taskData = task.data;
let jsonSchema = task.form_schema;
let reactFragmentToHideSubmitButton = null;
if (task.type === 'Manual Task') {
if (task.typename === 'ManualTask') {
taskData = {};
jsonSchema = {
type: 'object',
@ -333,9 +333,9 @@ export default function TaskShow() {
if (task.state === 'READY') {
let submitButtonText = 'Submit';
let saveAsDraftButton = null;
if (task.type === 'Manual Task') {
if (task.typename === 'ManualTask') {
submitButtonText = 'Continue';
} else if (task.type === 'User Task') {
} else if (task.typename === 'UserTask') {
saveAsDraftButton = (
<Button
id="save-as-draft-button"
@ -404,12 +404,13 @@ export default function TaskShow() {
task.process_model_identifier
)}/${params.process_instance_id}`,
],
[`Task: ${task.title || task.id}`],
[`Task: ${task.name_for_display || task.id}`],
]}
/>
<div>{buildTaskNavigation()}</div>
<h3>
Task: {task.title} ({task.process_model_display_name}){statusString}
Task: {task.name_for_display} ({task.process_model_display_name})
{statusString}
</h3>
<InstructionsForEndUser task={task} />
{formElement()}