From 320d1b4083968ab9237cd00d9bb844899b5953d0 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 19 Apr 2023 11:24:54 -0400 Subject: [PATCH 01/11] store process instance errors in the db w/ burnettk --- .../{44a8f46cc508_.py => c95031498e62_.py} | 21 +++++- .../models/process_instance_error_detail.py | 17 +++++ .../models/process_instance_event.py | 4 ++ .../models/process_instance_metadata.py | 3 - .../routes/process_instances_controller.py | 44 ++++++------ .../routes/tasks_controller.py | 4 +- .../services/error_handling_service.py | 66 ++++++++--------- .../services/process_instance_processor.py | 70 +++---------------- .../process_instance_queue_service.py | 5 +- .../services/process_instance_service.py | 15 ++-- .../services/task_service.py | 34 +++++++++ 11 files changed, 153 insertions(+), 130 deletions(-) rename spiffworkflow-backend/migrations/versions/{44a8f46cc508_.py => c95031498e62_.py} (97%) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py diff --git a/spiffworkflow-backend/migrations/versions/44a8f46cc508_.py b/spiffworkflow-backend/migrations/versions/c95031498e62_.py similarity index 97% rename from spiffworkflow-backend/migrations/versions/44a8f46cc508_.py rename to spiffworkflow-backend/migrations/versions/c95031498e62_.py index da70af012..c375745cb 100644 --- a/spiffworkflow-backend/migrations/versions/44a8f46cc508_.py +++ b/spiffworkflow-backend/migrations/versions/c95031498e62_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 44a8f46cc508 +Revision ID: c95031498e62 Revises: -Create Date: 2023-04-17 15:40:28.658588 +Create Date: 2023-04-19 10:35:25.813002 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
-revision = '44a8f46cc508' +revision = 'c95031498e62' down_revision = None branch_labels = None depends_on = None @@ -463,6 +463,17 @@ def upgrade(): batch_op.create_index(batch_op.f('ix_message_instance_correlation_rule_message_instance_id'), ['message_instance_id'], unique=False) batch_op.create_index(batch_op.f('ix_message_instance_correlation_rule_name'), ['name'], unique=False) + op.create_table('process_instance_error_detail', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_instance_event_id', sa.Integer(), nullable=False), + sa.Column('message', sa.String(length=1024), nullable=False), + sa.Column('stacktrace', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['process_instance_event_id'], ['process_instance_event.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('process_instance_error_detail', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_process_instance_error_detail_process_instance_event_id'), ['process_instance_event_id'], unique=False) + op.create_table('human_task_user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('human_task_id', sa.Integer(), nullable=False), @@ -486,6 +497,10 @@ def downgrade(): batch_op.drop_index(batch_op.f('ix_human_task_user_human_task_id')) op.drop_table('human_task_user') + with op.batch_alter_table('process_instance_error_detail', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_process_instance_error_detail_process_instance_event_id')) + + op.drop_table('process_instance_error_detail') with op.batch_alter_table('message_instance_correlation_rule', schema=None) as batch_op: batch_op.drop_index(batch_op.f('ix_message_instance_correlation_rule_name')) batch_op.drop_index(batch_op.f('ix_message_instance_correlation_rule_message_instance_id')) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py new file mode 100644 index 000000000..91324c772 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py @@ -0,0 +1,17 @@ +from sqlalchemy.orm import relationship +from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from spiffworkflow_backend.models.db import db + + +class ProcessInstanceErrorDetailModel(SpiffworkflowBaseDBModel): + __tablename__ = "process_instance_error_detail" + id: int = db.Column(db.Integer, primary_key=True) + + process_instance_event_id: int = db.Column(ForeignKey("process_instance_event.id"), nullable=False, index=True) + process_instance_event = relationship('ProcessInstanceEventModel') + + message: str = db.Column(db.String(1024), nullable=False) + + # this should be 65k in mysql + stacktrace: str = db.Column(db.Text(), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py index fe920b57e..936e4d9d3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py @@ -1,4 +1,5 @@ from __future__ import annotations +from sqlalchemy.orm import relationship from typing import Any @@ -13,6 +14,7 @@ from spiffworkflow_backend.models.user import UserModel # event types take the form [SUBJECT]_[PAST_TENSE_VERB] since subject is not always the same. 
class ProcessInstanceEventType(SpiffEnum): + process_instance_error = "process_instance_error" process_instance_resumed = "process_instance_resumed" process_instance_rewound_to_task = "process_instance_rewound_to_task" process_instance_suspended = "process_instance_suspended" @@ -37,6 +39,8 @@ class ProcessInstanceEventModel(SpiffworkflowBaseDBModel): user_id = db.Column(ForeignKey(UserModel.id), nullable=True, index=True) # type: ignore + error_deatils = relationship("ProcessInstanceErrorDetailModel", cascade="delete") # type: ignore + @validates("event_type") def validate_event_type(self, key: str, value: Any) -> Any: return self.validate_enum_field(key, value, ProcessInstanceEventType) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py index 4e00a6ca1..ef043a7e7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py @@ -1,4 +1,3 @@ -"""Process_instance_metadata.""" from dataclasses import dataclass from sqlalchemy import ForeignKey @@ -10,8 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @dataclass class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel): - """ProcessInstanceMetadataModel.""" - __tablename__ = "process_instance_metadata" __table_args__ = (db.UniqueConstraint("process_instance_id", "key", name="process_instance_metadata_unique"),) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 6813f415b..1e6833c6a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -112,7 +112,6 @@ def process_instance_create( def process_instance_run( modified_process_model_identifier: str, process_instance_id: int, - do_engine_steps: bool = True, ) -> flask.wrappers.Response: """Process_instance_run.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -123,22 +122,22 @@ def process_instance_run( status_code=400, ) - processor = ProcessInstanceProcessor(process_instance) - - if do_engine_steps: - try: - processor.do_engine_steps(save=True) - except ( - ApiError, - ProcessInstanceIsNotEnqueuedError, - ProcessInstanceIsAlreadyLockedError, - ) as e: - ErrorHandlingService().handle_error(processor, e) - raise e - except Exception as e: - ErrorHandlingService().handle_error(processor, e) - # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes. - # we need to recurse through all last tasks if the last task is a call activity or subprocess. + processor = None + try: + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + except ( + ApiError, + ProcessInstanceIsNotEnqueuedError, + ProcessInstanceIsAlreadyLockedError, + ) as e: + ErrorHandlingService.handle_error(process_instance, e) + raise e + except Exception as e: + ErrorHandlingService.handle_error(process_instance, e) + # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes. + # we need to recurse through all last tasks if the last task is a call activity or subprocess. 
+ if processor is not None: task = processor.bpmn_process_instance.last_task raise ApiError.from_task( error_code="unknown_exception", @@ -146,9 +145,10 @@ def process_instance_run( status_code=400, task=task, ) from e + raise e - if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: - MessageService.correlate_all_message_instances() + if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: + MessageService.correlate_all_message_instances() process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor) process_instance_data = processor.get_data() @@ -172,7 +172,7 @@ def process_instance_terminate( ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError, ) as e: - ErrorHandlingService().handle_error(processor, e) + ErrorHandlingService.handle_error(process_instance, e) raise e return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -193,7 +193,7 @@ def process_instance_suspend( ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError, ) as e: - ErrorHandlingService().handle_error(processor, e) + ErrorHandlingService.handle_error(process_instance, e) raise e return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -214,7 +214,7 @@ def process_instance_resume( ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError, ) as e: - ErrorHandlingService().handle_error(processor, e) + ErrorHandlingService.handle_error(process_instance, e) raise e return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 9baffd258..2a4ffc1de 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -215,7 +215,7 @@ def task_data_update( ) if json_data_dict is not None: TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) - ProcessInstanceProcessor.add_event_to_process_instance( + TaskService.add_event_to_process_instance( process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid ) try: @@ -428,7 +428,7 @@ def _task_submit_shared( if save_as_draft: task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) - ProcessInstanceService.update_form_task_data(processor, spiff_task, body, g.user) + ProcessInstanceService.update_form_task_data(process_instance, spiff_task, body, g.user) json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( task_model, spiff_task.data, "json_data_hash" ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py index 4407c6db4..6696f3538 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py @@ -1,5 +1,6 @@ -"""Error_handling_service.""" from typing import Union +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType +from spiffworkflow_backend.services.task_service import TaskService from flask import current_app from flask import g @@ -11,56 +12,52 @@ from spiffworkflow_backend.models.process_instance import 
ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.services.message_service import MessageService -from spiffworkflow_backend.services.process_instance_processor import ( - ProcessInstanceProcessor, -) from spiffworkflow_backend.services.process_model_service import ProcessModelService class ErrorHandlingService: - """ErrorHandlingService.""" - MESSAGE_NAME = "SystemErrorMessage" - @staticmethod - def set_instance_status(instance_id: int, status: str) -> None: - """Set_instance_status.""" - instance = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == instance_id).first() - if instance: - instance.status = status - db.session.commit() - - def handle_error(self, _processor: ProcessInstanceProcessor, _error: Union[ApiError, Exception]) -> None: + @classmethod + def handle_error(cls, process_instance: ProcessInstanceModel, error: Union[ApiError, Exception]) -> None: """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.""" - process_model = ProcessModelService.get_process_model(_processor.process_model_identifier) - # First, suspend or fault the instance - if process_model.fault_or_suspend_on_exception == "suspend": - self.set_instance_status( - _processor.process_instance_model.id, - ProcessInstanceStatus.suspended.value, - ) - else: - # fault is the default - self.set_instance_status( - _processor.process_instance_model.id, - ProcessInstanceStatus.error.value, - ) + process_model = ProcessModelService.get_process_model(process_instance.process_model_identifier) + cls._update_process_instance_in_database(process_instance, error, process_model.fault_or_suspend_on_exception) # Second, send a bpmn message out, but only if an exception notification address is provided # This will create a new Send Message with correlation keys on the recipients and the message # body. if len(process_model.exception_notification_addresses) > 0: try: - self.handle_system_notification(_error, process_model, _processor) + cls._handle_system_notification(error, process_model, process_instance) except Exception as e: # hmm... what to do if a notification method fails. 
Probably log, at least current_app.logger.error(e) + @classmethod + def _update_process_instance_in_database(cls, process_instance: ProcessInstanceModel, error: Union[ApiError, Exception], fault_or_suspend_on_exception: str) -> None: + TaskService.add_event_to_process_instance(process_instance, ProcessInstanceEventType.process_instance_error.value, exception=error) + + # First, suspend or fault the instance + if fault_or_suspend_on_exception == "suspend": + cls._set_instance_status( + process_instance, + ProcessInstanceStatus.suspended.value, + ) + else: + # fault is the default + cls._set_instance_status( + process_instance, + ProcessInstanceStatus.error.value, + ) + + db.session.commit() + @staticmethod - def handle_system_notification( + def _handle_system_notification( error: Union[ApiError, Exception], process_model: ProcessModelInfo, - _processor: ProcessInstanceProcessor, + process_instance: ProcessInstanceModel, ) -> None: """Send a BPMN Message - which may kick off a waiting process.""" message_text = ( @@ -74,7 +71,7 @@ class ErrorHandlingService: if "user" in g: user_id = g.user.id else: - user_id = _processor.process_instance_model.process_initiator_id + user_id = process_instance.process_initiator_id message_instance = MessageInstanceModel( message_type="send", @@ -85,3 +82,8 @@ class ErrorHandlingService: db.session.add(message_instance) db.session.commit() MessageService.correlate_send_message(message_instance) + + @staticmethod + def _set_instance_status(process_instance: ProcessInstanceModel, status: str) -> None: + process_instance.status = status + db.session.add(process_instance) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 8cd8dfe41..1e2a42f33 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,5 +1,10 @@ """Process_instance_processor.""" + +# TODO: clean up this service for a clear distinction between it and the process_instance_service +# where this points to the pi service +import traceback import _strptime # type: ignore +from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel import copy import decimal import json @@ -1318,7 +1323,7 @@ class ProcessInstanceProcessor: db.session.bulk_save_objects(new_task_models.values()) TaskService.insert_or_update_json_data_records(new_json_data_dicts) - self.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id) + TaskService.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id) self.save() # Saving the workflow seems to reset the status self.suspend() @@ -1331,7 +1336,7 @@ class ProcessInstanceProcessor: def reset_process(cls, process_instance: ProcessInstanceModel, to_task_guid: str) -> None: """Reset a process to an earlier state.""" # raise Exception("This feature to reset a process instance to a given task is currently unavaiable") - cls.add_event_to_process_instance( + TaskService.add_event_to_process_instance( process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid ) @@ -1861,7 +1866,7 @@ class ProcessInstanceProcessor: TaskService.update_json_data_dicts_using_list(json_data_dict_list, json_data_dict_mapping) 
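# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of this patch): how a caller is expected
# to use the reworked ErrorHandlingService above, pieced together from the
# hunks in this commit. The helper function and variable names here are
# hypothetical.
# ---------------------------------------------------------------------------
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor


def run_and_record_failures(process_instance):  # hypothetical helper
    try:
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)
    except Exception as error:
        # handle_error is now a classmethod that takes the ProcessInstanceModel directly,
        # so callers no longer need a live processor just to report a failure. Per the
        # diff it records a process_instance_error event (plus a
        # ProcessInstanceErrorDetailModel row via TaskService.add_event_to_process_instance),
        # sets the instance to suspended or error based on fault_or_suspend_on_exception,
        # and sends the SystemErrorMessage notification when notification addresses are
        # configured on the process model.
        ErrorHandlingService.handle_error(process_instance, error)
        raise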
TaskService.insert_or_update_json_data_records(json_data_dict_mapping) - self.add_event_to_process_instance( + TaskService.add_event_to_process_instance( self.process_instance_model, ProcessInstanceEventType.task_completed.value, task_guid=task_model.guid, @@ -1960,49 +1965,13 @@ class ProcessInstanceProcessor: return task return None - def get_nav_item(self, task: SpiffTask) -> Any: - """Get_nav_item.""" - for nav_item in self.bpmn_process_instance.get_nav_list(): - if nav_item["task_id"] == task.id: - return nav_item - return None - - def find_spec_and_field(self, spec_name: str, field_id: Union[str, int]) -> Any: - """Tracks down a form field by name in the process_instance spec(s), Returns a tuple of the task, and form.""" - process_instances = [self.bpmn_process_instance] - for task in self.bpmn_process_instance.get_ready_user_tasks(): - if task.process_instance not in process_instances: - process_instances.append(task.process_instance) - for process_instance in process_instances: - for spec in process_instance.spec.task_specs.values(): - if spec.name == spec_name: - if not hasattr(spec, "form"): - raise ApiError( - "invalid_spec", - "The spec name you provided does not contain a form.", - ) - - for field in spec.form.fields: - if field.id == field_id: - return spec, field - - raise ApiError( - "invalid_field", - f"The task '{spec_name}' has no field named '{field_id}'", - ) - - raise ApiError( - "invalid_spec", - f"Unable to find a task in the process_instance called '{spec_name}'", - ) - def terminate(self) -> None: """Terminate.""" self.bpmn_process_instance.cancel() self.save() self.process_instance_model.status = "terminated" db.session.add(self.process_instance_model) - self.add_event_to_process_instance( + TaskService.add_event_to_process_instance( self.process_instance_model, ProcessInstanceEventType.process_instance_terminated.value ) db.session.commit() @@ -2011,7 +1980,7 @@ class ProcessInstanceProcessor: """Suspend.""" self.process_instance_model.status = ProcessInstanceStatus.suspended.value db.session.add(self.process_instance_model) - self.add_event_to_process_instance( + TaskService.add_event_to_process_instance( self.process_instance_model, ProcessInstanceEventType.process_instance_suspended.value ) db.session.commit() @@ -2020,24 +1989,7 @@ class ProcessInstanceProcessor: """Resume.""" self.process_instance_model.status = ProcessInstanceStatus.waiting.value db.session.add(self.process_instance_model) - self.add_event_to_process_instance( + TaskService.add_event_to_process_instance( self.process_instance_model, ProcessInstanceEventType.process_instance_resumed.value ) db.session.commit() - - @classmethod - def add_event_to_process_instance( - cls, - process_instance: ProcessInstanceModel, - event_type: str, - task_guid: Optional[str] = None, - user_id: Optional[int] = None, - ) -> None: - if user_id is None and hasattr(g, "user") and g.user: - user_id = g.user.id - process_instance_event = ProcessInstanceEventModel( - process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id - ) - if task_guid: - process_instance_event.task_guid = task_guid - db.session.add(process_instance_event) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index dd733816b..59915eb1b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -1,5 +1,7 @@ import contextlib import time +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType +from spiffworkflow_backend.services.task_service import TaskService from typing import Generator from typing import List from typing import Optional @@ -24,8 +26,6 @@ class ProcessInstanceIsAlreadyLockedError(Exception): class ProcessInstanceQueueService: - """TODO: comment.""" - @classmethod def _configure_and_save_queue_entry( cls, process_instance: ProcessInstanceModel, queue_entry: ProcessInstanceQueueModel @@ -99,6 +99,7 @@ class ProcessInstanceQueueService: except Exception as ex: process_instance.status = ProcessInstanceStatus.error.value db.session.add(process_instance) + TaskService.add_event_to_process_instance(process_instance, ProcessInstanceEventType.process_instance_error.value, exception=ex) db.session.commit() raise ex finally: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 3a0307f18..c31bb447d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -322,19 +322,20 @@ class ProcessInstanceService: cls.replace_file_data_with_digest_references(data, models) - @staticmethod + @classmethod def update_form_task_data( - processor: ProcessInstanceProcessor, + cls, + process_instance: ProcessInstanceModel, spiff_task: SpiffTask, data: dict[str, Any], user: UserModel, ) -> None: - AuthorizationService.assert_user_can_complete_spiff_task(processor.process_instance_model.id, spiff_task, user) - ProcessInstanceService.save_file_data_and_replace_with_digest_references( + AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, user) + cls.save_file_data_and_replace_with_digest_references( data, - processor.process_instance_model.id, + process_instance.id, ) - dot_dct = ProcessInstanceService.create_dot_dict(data) + dot_dct = cls.create_dot_dict(data) spiff_task.update_data(dot_dct) @staticmethod @@ -350,7 +351,7 @@ class ProcessInstanceService: Abstracted here because we need to do it multiple times when completing all tasks in a multi-instance task. """ - ProcessInstanceService.update_form_task_data(processor, spiff_task, data, user) + ProcessInstanceService.update_form_task_data(processor.process_instance_model, spiff_task, data, user) # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. 
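# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of this patch): reading a recorded error
# back out of the new tables, mirroring what the error_details endpoint added
# later in this series does. The relationship is introduced as "error_deatils"
# in this commit and renamed to "error_details" in the next one; the corrected
# name is used below. The helper function is hypothetical.
# ---------------------------------------------------------------------------
from typing import Optional

from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType


def latest_error_message(process_instance_id: int) -> Optional[str]:  # hypothetical helper
    event = (
        ProcessInstanceEventModel.query.filter_by(
            process_instance_id=process_instance_id,
            event_type=ProcessInstanceEventType.process_instance_error.value,
        )
        .order_by(ProcessInstanceEventModel.id.desc())
        .first()
    )
    if event is None or not event.error_details:
        return None
    # message is truncated to fit the 1024-character column; the (also truncated)
    # stacktrace is stored alongside it on the same detail row.
    return event.error_details[0].message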
processor.complete_task(spiff_task, human_task, user=user) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 76880da2b..d8123fd81 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -1,5 +1,8 @@ import copy import json +from flask import g +from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel +import traceback import time from hashlib import sha256 from typing import Optional @@ -592,3 +595,34 @@ class TaskService: for json_data_dict in json_data_dict_list: if json_data_dict is not None: json_data_dicts[json_data_dict["hash"]] = json_data_dict + + # TODO: move to process_instance_service once we clean it and the processor up + @classmethod + def add_event_to_process_instance( + cls, + process_instance: ProcessInstanceModel, + event_type: str, + task_guid: Optional[str] = None, + user_id: Optional[int] = None, + exception: Optional[Exception] = None, + ) -> None: + if user_id is None and hasattr(g, "user") and g.user: + user_id = g.user.id + process_instance_event = ProcessInstanceEventModel( + process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id + ) + if task_guid: + process_instance_event.task_guid = task_guid + db.session.add(process_instance_event) + + if event_type == ProcessInstanceEventType.process_instance_error.value and exception is not None: + # truncate to avoid database errors on large values. We observed that text in mysql is 65K. + stacktrace = traceback.format_exc()[0:63999] + message = str(exception)[0:1023] + + process_instance_error_detail = ProcessInstanceErrorDetailModel( + process_instance_event=process_instance_event, + message=message, + stacktrace=stacktrace, + ) + db.session.add(process_instance_error_detail) From 6747d9df3d9850ce7147ba025796e2131517e8c9 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 19 Apr 2023 13:56:00 -0400 Subject: [PATCH 02/11] added api to get error details for an event and added simple modal on frontend to show it --- .../src/spiffworkflow_backend/api.yml | 33 ++++++ .../models/process_instance_error_detail.py | 2 + .../models/process_instance_event.py | 2 +- .../process_instance_events_controller.py | 18 +++ .../src/hooks/UriListForPermissions.tsx | 11 +- spiffworkflow-frontend/src/interfaces.ts | 6 + .../src/routes/ProcessInstanceLogList.tsx | 112 +++++++++++++++++- .../src/types/definitions.d.ts | 1 + 8 files changed, 174 insertions(+), 11 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index c14550130..131bce354 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -2010,6 +2010,39 @@ paths: schema: $ref: "#/components/schemas/ProcessInstanceLog" + /event-error-details/{modified_process_model_identifier}/{process_instance_id}/{process_instance_event_id}: + parameters: + - name: process_instance_id + in: path + required: true + description: the id of the process instance + schema: + type: integer + - name: modified_process_model_identifier + in: path + required: true + description: The process_model_id, modified to replace slashes (/) + schema: + type: string + - name: process_instance_event_id + in: path + required: true + description: the 
id of the process instance event + schema: + type: integer + get: + tags: + - Process Instance Events + operationId: spiffworkflow_backend.routes.process_instance_events_controller.error_details + summary: returns the error details for a given process instance event. + responses: + "200": + description: list of types + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessInstanceLog" + /secrets: parameters: - name: page diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py index 91324c772..d1a38c49e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py @@ -1,9 +1,11 @@ +from dataclasses import dataclass from sqlalchemy.orm import relationship from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey from spiffworkflow_backend.models.db import db +@dataclass class ProcessInstanceErrorDetailModel(SpiffworkflowBaseDBModel): __tablename__ = "process_instance_error_detail" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py index 936e4d9d3..0b3738169 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py @@ -39,7 +39,7 @@ class ProcessInstanceEventModel(SpiffworkflowBaseDBModel): user_id = db.Column(ForeignKey(UserModel.id), nullable=True, index=True) # type: ignore - error_deatils = relationship("ProcessInstanceErrorDetailModel", cascade="delete") # type: ignore + error_details = relationship("ProcessInstanceErrorDetailModel", cascade="delete") # type: ignore @validates("event_type") def validate_event_type(self, key: str, value: Any) -> Any: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py index 5711f6bcc..d829434e7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py @@ -1,4 +1,5 @@ from typing import Optional +from spiffworkflow_backend.exceptions.api_error import ApiError import flask.wrappers from flask import jsonify @@ -91,3 +92,20 @@ def types() -> flask.wrappers.Response: task_types = [t.typename for t in query] event_types = ProcessInstanceEventType.list() return make_response(jsonify({"task_types": task_types, "event_types": event_types}), 200) + + +def error_details( + modified_process_model_identifier: str, + process_instance_id: int, + process_instance_event_id: int, +) -> flask.wrappers.Response: + process_instance_event = ProcessInstanceEventModel.query.filter_by(id=process_instance_event_id).first() + if process_instance_event is None: + raise ( + ApiError( + error_code="process_instance_event_cannot_be_found", + message=f"Process instance event cannot be found: {process_instance_event_id}", + status_code=400, + ) + ) + return make_response(jsonify(process_instance_event.error_details[0]), 200) diff --git 
a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx index c33946985..ae663541b 100644 --- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx +++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx @@ -10,18 +10,19 @@ export const useUriListForPermissions = () => { processGroupListPath: '/v1.0/process-groups', processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}`, + processInstanceCompleteTaskPath: `/v1.0/complete-task/${params.process_model_id}/${params.process_instance_id}`, processInstanceCreatePath: `/v1.0/process-instances/${params.process_model_id}`, + processInstanceErrorEventDetails: `/v1.0/event-error-details/${params.process_model_id}/${params.process_instance_id}`, processInstanceListPath: '/v1.0/process-instances', processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, processInstanceReportListPath: '/v1.0/process-instances/reports', - processInstanceResumePath: `/v1.0/process-instance-resume/${params.process_model_id}/${params.process_instance_id}`, - processInstanceSuspendPath: `/v1.0/process-instance-suspend/${params.process_model_id}/${params.process_instance_id}`, processInstanceResetPath: `/v1.0/process-instance-reset/${params.process_model_id}/${params.process_instance_id}`, - processInstanceTaskDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processInstanceResumePath: `/v1.0/process-instance-resume/${params.process_model_id}/${params.process_instance_id}`, processInstanceSendEventPath: `/v1.0/send-event/${params.process_model_id}/${params.process_instance_id}`, - processInstanceCompleteTaskPath: `/v1.0/complete-task/${params.process_model_id}/${params.process_instance_id}`, - processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceSuspendPath: `/v1.0/process-instance-suspend/${params.process_model_id}/${params.process_instance_id}`, + processInstanceTaskDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, processInstanceTaskListForMePath: `/v1.0/process-instances/for-me/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`, processInstanceTerminatePath: `/v1.0/process-instance-terminate/${params.process_model_id}/${params.process_instance_id}`, processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 802c48c7d..c26c1ce32 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -297,6 +297,12 @@ export interface JsonSchemaForm { required: string[]; } +export interface ProcessInstanceEventErrorDetail { + id: number; + message: string; + stacktrace: string; +} + export interface ProcessInstanceLogEntry { bpmn_process_definition_identifier: string; bpmn_process_definition_name: string; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index d32a5b347..16bb3f685 100644 --- 
a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -1,4 +1,5 @@ import { useEffect, useState } from 'react'; +import { ErrorOutline } from '@carbon/icons-react'; import { Table, Tabs, @@ -10,6 +11,8 @@ import { Button, TextInput, ComboBox, + Modal, + Loading, // @ts-ignore } from '@carbon/react'; import { @@ -28,8 +31,13 @@ import { } from '../helpers'; import HttpService from '../services/HttpService'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; -import { ProcessInstanceLogEntry } from '../interfaces'; +import { + PermissionsToCheck, + ProcessInstanceEventErrorDetail, + ProcessInstanceLogEntry, +} from '../interfaces'; import Filters from '../components/Filters'; +import { usePermissionFetcher } from '../hooks/PermissionService'; type OwnProps = { variant: string; @@ -46,11 +54,16 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { const [taskTypes, setTaskTypes] = useState([]); const [eventTypes, setEventTypes] = useState([]); + const [eventForModal, setEventForModal] = + useState(null); + const [eventErrorDetails, setEventErrorDetails] = + useState(null); const { targetUris } = useUriListForPermissions(); - const isDetailedView = searchParams.get('detailed') === 'true'; - - const taskNameHeader = isDetailedView ? 'Task Name' : 'Milestone'; + const permissionRequestData: PermissionsToCheck = { + [targetUris.processInstanceErrorEventDetails]: ['GET'], + }; + const { ability } = usePermissionFetcher(permissionRequestData); const [showFilterOptions, setShowFilterOptions] = useState(false); @@ -58,6 +71,8 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { if (variant === 'all') { processInstanceShowPageBaseUrl = `/admin/process-instances/${params.process_model_id}`; } + const isDetailedView = searchParams.get('detailed') === 'true'; + const taskNameHeader = isDetailedView ? 'Task Name' : 'Milestone'; const updateSearchParams = (value: string, key: string) => { if (value) { @@ -128,6 +143,92 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { isDetailedView, ]); + const handleErrorEventModalClose = () => { + setEventForModal(null); + setEventErrorDetails(null); + }; + + const errorEventModal = () => { + if (eventForModal) { + const modalHeading = `Event Error Details for`; + let errorMessageTag = ( + + ); + if (eventErrorDetails) { + errorMessageTag = ( + <> +

{eventErrorDetails.message} NOOO

+
+
{eventErrorDetails.stacktrace}
+ + ); + } + return ( + + {errorMessageTag} + + ); + } + return null; + }; + + const handleErrorDetailsReponse = ( + results: ProcessInstanceEventErrorDetail + ) => { + setEventErrorDetails(results); + }; + + const getErrorDetailsForEvent = (logEntry: ProcessInstanceLogEntry) => { + setEventForModal(logEntry); + if (ability.can('GET', targetUris.processInstanceErrorEventDetails)) { + HttpService.makeCallToBackend({ + path: `${targetUris.processInstanceErrorEventDetails}/${logEntry.id}`, + httpMethod: 'GET', + successCallback: handleErrorDetailsReponse, + failureCallback: (error: any) => { + const errorObject: ProcessInstanceEventErrorDetail = { + id: 0, + message: `ERROR: ${error.message}`, + stacktrace: '', + }; + setEventErrorDetails(errorObject); + }, + }); + } + }; + + const eventTypeCell = (logEntry: ProcessInstanceLogEntry) => { + if ( + ['process_instance_error', 'task_error'].includes(logEntry.event_type) + ) { + const errorTitle = 'Event has an error'; + const errorIcon = ( + <> +   + + + ); + return ( + + ); + } + return logEntry.event_type; + }; + const getTableRow = (logEntry: ProcessInstanceLogEntry) => { const tableRow = []; const taskNameCell = ( @@ -164,7 +265,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { <> {logEntry.task_definition_identifier} {logEntry.bpmn_task_type} - {logEntry.event_type} + {eventTypeCell(logEntry)} {logEntry.username || ( system @@ -405,6 +506,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { ]} /> {tabs()} + {errorEventModal()} Date: Wed, 19 Apr 2023 15:20:19 -0400 Subject: [PATCH 03/11] display event errors in the frontend using errorDisplay w/ burnettk --- .../{c95031498e62_.py => e0510795b643_.py} | 10 +- spiffworkflow-backend/poetry.lock | 6 +- spiffworkflow-backend/pyproject.toml | 2 +- .../models/process_instance_error_detail.py | 8 +- .../routes/process_instances_controller.py | 2 + .../services/task_service.py | 19 +++- .../services/workflow_execution_service.py | 2 + .../src/components/ErrorDisplay.tsx | 93 ++++++++++--------- spiffworkflow-frontend/src/interfaces.ts | 7 +- .../src/routes/ProcessInstanceLogList.tsx | 17 +++- 10 files changed, 112 insertions(+), 54 deletions(-) rename spiffworkflow-backend/migrations/versions/{c95031498e62_.py => e0510795b643_.py} (99%) diff --git a/spiffworkflow-backend/migrations/versions/c95031498e62_.py b/spiffworkflow-backend/migrations/versions/e0510795b643_.py similarity index 99% rename from spiffworkflow-backend/migrations/versions/c95031498e62_.py rename to spiffworkflow-backend/migrations/versions/e0510795b643_.py index c375745cb..d613a74a9 100644 --- a/spiffworkflow-backend/migrations/versions/c95031498e62_.py +++ b/spiffworkflow-backend/migrations/versions/e0510795b643_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: c95031498e62 +Revision ID: e0510795b643 Revises: -Create Date: 2023-04-19 10:35:25.813002 +Create Date: 2023-04-19 14:36:11.004444 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
-revision = 'c95031498e62' +revision = 'e0510795b643' down_revision = None branch_labels = None depends_on = None @@ -468,6 +468,10 @@ def upgrade(): sa.Column('process_instance_event_id', sa.Integer(), nullable=False), sa.Column('message', sa.String(length=1024), nullable=False), sa.Column('stacktrace', sa.Text(), nullable=False), + sa.Column('task_line_number', sa.Integer(), nullable=True), + sa.Column('task_offset', sa.Integer(), nullable=True), + sa.Column('task_line_contents', sa.String(length=255), nullable=True), + sa.Column('task_trace', sa.JSON(), nullable=True), sa.ForeignKeyConstraint(['process_instance_event_id'], ['process_instance_event.id'], ), sa.PrimaryKeyConstraint('id') ) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index d26d00420..5ba2205a3 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1923,8 +1923,8 @@ lxml = "*" [package.source] type = "git" url = "https://github.com/sartography/SpiffWorkflow" -reference = "main" -resolved_reference = "7211e67ee0dfecbabaeb7cec8f0e0373bd7cdc10" +reference = "feature/new-task-states" +resolved_reference = "5a0fd2774fde20dd65dbb49ae67f22cad53df528" [[package]] name = "sqlalchemy" @@ -2307,7 +2307,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "994c36ab39238500b4fd05bc1ccdd2d729dd5f66749ab77b1028371147bdf753" +content-hash = "4ae3c31115f378193f33eb9b27d376fcf0f7c20fba54ed5c921f37a4778b8d09" [metadata.files] alabaster = [ diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index d4288f5b7..bfa0ad071 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -27,7 +27,7 @@ flask-marshmallow = "*" flask-migrate = "*" flask-restful = "*" werkzeug = "*" -SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} +SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/new-task-states"} # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"} # SpiffWorkflow = {develop = true, path = "../../SpiffWorkflow" } sentry-sdk = "^1.10" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py index d1a38c49e..663ab6cee 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py @@ -1,4 +1,5 @@ from dataclasses import dataclass +from typing import Optional from sqlalchemy.orm import relationship from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey @@ -11,9 +12,14 @@ class ProcessInstanceErrorDetailModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) process_instance_event_id: int = db.Column(ForeignKey("process_instance_event.id"), nullable=False, index=True) - process_instance_event = relationship('ProcessInstanceEventModel') + process_instance_event = relationship('ProcessInstanceEventModel') # type: ignore message: str = db.Column(db.String(1024), nullable=False) # this should be 65k in mysql stacktrace: str = db.Column(db.Text(), nullable=False) + + task_line_number: Optional[int] = db.Column(db.Integer) + 
task_offset: Optional[int] = db.Column(db.Integer) + task_line_contents: Optional[str] = db.Column(db.String(255)) + task_trace: Optional[list] = db.Column(db.JSON) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 1e6833c6a..58acee749 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -131,6 +131,7 @@ def process_instance_run( ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError, ) as e: + # import pdb; pdb.set_trace() ErrorHandlingService.handle_error(process_instance, e) raise e except Exception as e: @@ -138,6 +139,7 @@ def process_instance_run( # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes. # we need to recurse through all last tasks if the last task is a call activity or subprocess. if processor is not None: + # import pdb; pdb.set_trace() task = processor.bpmn_process_instance.last_task raise ApiError.from_task( error_code="unknown_exception", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index d8123fd81..e965bdac3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -1,5 +1,7 @@ import copy import json +from spiffworkflow_backend.exceptions.api_error import ApiError +from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore from flask import g from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel import traceback @@ -158,7 +160,7 @@ class TaskService: if task_model.state == "COMPLETED" or task_failed: event_type = ProcessInstanceEventType.task_completed.value - if task_failed: + if task_failed or task_model.state == TaskState.ERROR: event_type = ProcessInstanceEventType.task_failed.value # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete @@ -620,9 +622,24 @@ class TaskService: stacktrace = traceback.format_exc()[0:63999] message = str(exception)[0:1023] + task_line_number = None + task_line_contents = None + task_trace = None + task_offset = None + # import pdb; pdb.set_trace() + if isinstance(exception, WorkflowTaskException) or (isinstance(exception, ApiError) and exception.error_code == 'task_error'): + task_line_number = exception.line_number + task_line_contents = exception.error_line + task_trace = exception.task_trace + task_offset = exception.offset + process_instance_error_detail = ProcessInstanceErrorDetailModel( process_instance_event=process_instance_event, message=message, stacktrace=stacktrace, + task_line_number=task_line_number, + task_line_contents=task_line_contents, + task_trace=task_trace, + task_offset=task_offset, ) db.session.add(process_instance_error_detail) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 5764cc89a..253c402ea 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -138,6 +138,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): | TaskState.MAYBE | TaskState.LIKELY | TaskState.FUTURE + | TaskState.STARTED + | TaskState.ERROR ): # these will be removed from the parent and then ignored if waiting_spiff_task._has_state(TaskState.PREDICTED_MASK): diff --git a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx index 411a9a532..dc9737270 100644 --- a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx +++ b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx @@ -1,5 +1,6 @@ import { Notification } from './Notification'; import useAPIError from '../hooks/UseApiError'; +import { ErrorForDisplay } from '../interfaces'; function errorDetailDisplay( errorObject: any, @@ -18,57 +19,65 @@ function errorDetailDisplay( return null; } +export const childrenForErrorObject = (errorObject: ErrorForDisplay) => { + let sentryLinkTag = null; + if (errorObject.sentry_link) { + sentryLinkTag = ( + + { + ': Find details about this error here (it may take a moment to become available): ' + } + + {errorObject.sentry_link} + + + ); + } + + const message =
{errorObject.message}
; + const taskName = errorDetailDisplay(errorObject, 'task_name', 'Task Name'); + const taskId = errorDetailDisplay(errorObject, 'task_id', 'Task ID'); + const fileName = errorDetailDisplay(errorObject, 'file_name', 'File Name'); + const lineNumber = errorDetailDisplay( + errorObject, + 'line_number', + 'Line Number' + ); + const errorLine = errorDetailDisplay(errorObject, 'error_line', 'Context'); + let taskTrace = null; + if (errorObject.task_trace && errorObject.task_trace.length > 1) { + taskTrace = ( +
+ Call Activity Trace: + {errorObject.task_trace.reverse().join(' -> ')} +
+ ); + } + + return [ + message, +
, + sentryLinkTag, + taskName, + taskId, + fileName, + lineNumber, + errorLine, + taskTrace, + ]; +}; + export default function ErrorDisplay() { const errorObject = useAPIError().error; const { removeError } = useAPIError(); let errorTag = null; - if (errorObject) { - let sentryLinkTag = null; - if (errorObject.sentry_link) { - sentryLinkTag = ( - - { - ': Find details about this error here (it may take a moment to become available): ' - } - - {errorObject.sentry_link} - - - ); - } - const message =
{errorObject.message}
; + if (errorObject) { const title = 'Error:'; - const taskName = errorDetailDisplay(errorObject, 'task_name', 'Task Name'); - const taskId = errorDetailDisplay(errorObject, 'task_id', 'Task ID'); - const fileName = errorDetailDisplay(errorObject, 'file_name', 'File Name'); - const lineNumber = errorDetailDisplay( - errorObject, - 'line_number', - 'Line Number' - ); - const errorLine = errorDetailDisplay(errorObject, 'error_line', 'Context'); - let taskTrace = null; - if (errorObject.task_trace && errorObject.task_trace.length > 1) { - taskTrace = ( -
- Call Activity Trace: - {errorObject.task_trace.reverse().join(' -> ')} -
- ); - } errorTag = ( removeError()} type="error"> - {message} -
- {sentryLinkTag} - {taskName} - {taskId} - {fileName} - {lineNumber} - {errorLine} - {taskTrace} + <>{childrenForErrorObject(errorObject)}
); } diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index c26c1ce32..7cf097d9e 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -238,8 +238,9 @@ export interface ErrorForDisplay { task_name?: string; task_id?: string; line_number?: number; + error_line?: string; file_name?: string; - task_trace?: [string]; + task_trace?: string[]; } export interface AuthenticationParam { @@ -301,6 +302,10 @@ export interface ProcessInstanceEventErrorDetail { id: number; message: string; stacktrace: string; + task_line_contents?: string; + task_line_number?: number; + task_offset?: number; + task_trace?: string[]; } export interface ProcessInstanceLogEntry { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 16bb3f685..7f54efd17 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -32,12 +32,14 @@ import { import HttpService from '../services/HttpService'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { + ErrorForDisplay, PermissionsToCheck, ProcessInstanceEventErrorDetail, ProcessInstanceLogEntry, } from '../interfaces'; import Filters from '../components/Filters'; import { usePermissionFetcher } from '../hooks/PermissionService'; +import { childrenForErrorObject } from '../components/ErrorDisplay'; type OwnProps = { variant: string; @@ -54,6 +56,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { const [taskTypes, setTaskTypes] = useState([]); const [eventTypes, setEventTypes] = useState([]); + const [eventForModal, setEventForModal] = useState(null); const [eventErrorDetails, setEventErrorDetails] = @@ -150,15 +153,25 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { const errorEventModal = () => { if (eventForModal) { - const modalHeading = `Event Error Details for`; + const modalHeading = 'Event Error Details'; let errorMessageTag = ( ); if (eventErrorDetails) { + const errorForDisplay: ErrorForDisplay = { + message: eventErrorDetails.message, + task_name: eventForModal.task_definition_name, + task_id: eventForModal.task_definition_identifier, + line_number: eventErrorDetails.task_line_number, + error_line: eventErrorDetails.task_line_contents, + task_trace: eventErrorDetails.task_trace, + }; + const errorChildren = childrenForErrorObject(errorForDisplay); errorMessageTag = ( <> -

{eventErrorDetails.message} NOOO

+

{eventErrorDetails.message}


+ {errorChildren}
{eventErrorDetails.stacktrace}
); From 6fa18c53df7ea9c5c055d0ed169e5fe26bfd3fc9 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 20 Apr 2023 07:35:39 -0400 Subject: [PATCH 04/11] some debug stuff --- .../src/spiffworkflow_backend/services/task_service.py | 3 +-- spiffworkflow-frontend/src/components/ErrorDisplay.tsx | 2 +- spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index e965bdac3..495394622 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -617,7 +617,7 @@ class TaskService: process_instance_event.task_guid = task_guid db.session.add(process_instance_event) - if event_type == ProcessInstanceEventType.process_instance_error.value and exception is not None: + if exception is not None: # truncate to avoid database errors on large values. We observed that text in mysql is 65K. stacktrace = traceback.format_exc()[0:63999] message = str(exception)[0:1023] @@ -626,7 +626,6 @@ class TaskService: task_line_contents = None task_trace = None task_offset = None - # import pdb; pdb.set_trace() if isinstance(exception, WorkflowTaskException) or (isinstance(exception, ApiError) and exception.error_code == 'task_error'): task_line_number = exception.line_number task_line_contents = exception.error_line diff --git a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx index dc9737270..5b68d301e 100644 --- a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx +++ b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx @@ -45,7 +45,7 @@ export const childrenForErrorObject = (errorObject: ErrorForDisplay) => { ); const errorLine = errorDetailDisplay(errorObject, 'error_line', 'Context'); let taskTrace = null; - if (errorObject.task_trace && errorObject.task_trace.length > 1) { + if (errorObject.task_trace && errorObject.task_trace.length > 0) { taskTrace = (
Call Activity Trace: diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 7f54efd17..cae1e8802 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -167,12 +167,12 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { task_trace: eventErrorDetails.task_trace, }; const errorChildren = childrenForErrorObject(errorForDisplay); + //
{eventErrorDetails.stacktrace}
errorMessageTag = ( <>

{eventErrorDetails.message}


{errorChildren} -
{eventErrorDetails.stacktrace}
); } From 08271da363df7ef95a93d0bae833a25815f59d3f Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 20 Apr 2023 08:31:55 -0400 Subject: [PATCH 05/11] store stacktrace as a json array so we can reverse it when displaying and fixed up display of errors --- .../{e0510795b643_.py => 5dae229f0a9b_.py} | 8 +++---- .../models/process_instance_error_detail.py | 2 +- .../services/task_service.py | 2 +- .../services/workflow_execution_service.py | 2 ++ .../src/components/ErrorDisplay.tsx | 22 +++++++++++++++---- spiffworkflow-frontend/src/interfaces.ts | 5 ++++- .../src/routes/ProcessInstanceLogList.tsx | 17 +++++--------- 7 files changed, 36 insertions(+), 22 deletions(-) rename spiffworkflow-backend/migrations/versions/{e0510795b643_.py => 5dae229f0a9b_.py} (99%) diff --git a/spiffworkflow-backend/migrations/versions/e0510795b643_.py b/spiffworkflow-backend/migrations/versions/5dae229f0a9b_.py similarity index 99% rename from spiffworkflow-backend/migrations/versions/e0510795b643_.py rename to spiffworkflow-backend/migrations/versions/5dae229f0a9b_.py index d613a74a9..25d6edffe 100644 --- a/spiffworkflow-backend/migrations/versions/e0510795b643_.py +++ b/spiffworkflow-backend/migrations/versions/5dae229f0a9b_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: e0510795b643 +Revision ID: 5dae229f0a9b Revises: -Create Date: 2023-04-19 14:36:11.004444 +Create Date: 2023-04-20 08:19:03.409112 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. -revision = 'e0510795b643' +revision = '5dae229f0a9b' down_revision = None branch_labels = None depends_on = None @@ -467,7 +467,7 @@ def upgrade(): sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_event_id', sa.Integer(), nullable=False), sa.Column('message', sa.String(length=1024), nullable=False), - sa.Column('stacktrace', sa.Text(), nullable=False), + sa.Column('stacktrace', sa.JSON(), nullable=False), sa.Column('task_line_number', sa.Integer(), nullable=True), sa.Column('task_offset', sa.Integer(), nullable=True), sa.Column('task_line_contents', sa.String(length=255), nullable=True), diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py index 663ab6cee..743cb5fd5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py @@ -17,7 +17,7 @@ class ProcessInstanceErrorDetailModel(SpiffworkflowBaseDBModel): message: str = db.Column(db.String(1024), nullable=False) # this should be 65k in mysql - stacktrace: str = db.Column(db.Text(), nullable=False) + stacktrace: Optional[list] = db.Column(db.JSON, nullable=False) task_line_number: Optional[int] = db.Column(db.Integer) task_offset: Optional[int] = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 495394622..73baa7389 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -619,7 +619,7 @@ class TaskService: if exception is not None: # truncate to avoid database errors on large values. We observed that text in mysql is 65K. 
- stacktrace = traceback.format_exc()[0:63999] + stacktrace = traceback.format_exc().split("\n") message = str(exception)[0:1023] task_line_number = None diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 253c402ea..8ccd5456c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,5 +1,6 @@ import copy import time +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from typing import Callable from typing import Optional from typing import Set @@ -329,6 +330,7 @@ class WorkflowExecutionService: self.process_bpmn_messages() self.queue_waiting_receive_messages() except SpiffWorkflowException as swe: + TaskService.add_event_to_process_instance(self.process_instance_model, ProcessInstanceEventType.task_failed.value, exception=swe, task_guid=str(swe.task.id)) raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe finally: diff --git a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx index 5b68d301e..40abec5d5 100644 --- a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx +++ b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx @@ -34,7 +34,9 @@ export const childrenForErrorObject = (errorObject: ErrorForDisplay) => { ); } - const message =
{errorObject.message}
; + const message = ( +
{errorObject.message}
+ ); const taskName = errorDetailDisplay(errorObject, 'task_name', 'Task Name'); const taskId = errorDetailDisplay(errorObject, 'task_id', 'Task ID'); const fileName = errorDetailDisplay(errorObject, 'file_name', 'File Name'); @@ -44,14 +46,26 @@ export const childrenForErrorObject = (errorObject: ErrorForDisplay) => { 'Line Number' ); const errorLine = errorDetailDisplay(errorObject, 'error_line', 'Context'); - let taskTrace = null; + let codeTrace = null; if (errorObject.task_trace && errorObject.task_trace.length > 0) { - taskTrace = ( + codeTrace = (
Call Activity Trace: {errorObject.task_trace.reverse().join(' -> ')}
      );
+  } else if (errorObject.stacktrace) {
+    codeTrace = (
+      <>
+        Stacktrace:
+        {errorObject.stacktrace.reverse().map((a) => (
+          <>
+            {a}
+            <br />
+          </>
+        ))}
+      </>
+ ); } return [ @@ -63,7 +77,7 @@ export const childrenForErrorObject = (errorObject: ErrorForDisplay) => { fileName, lineNumber, errorLine, - taskTrace, + codeTrace, ]; }; diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 7cf097d9e..f0b2f48cf 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -234,6 +234,8 @@ export type HotCrumbItem = HotCrumbItemArray | HotCrumbItemObject; export interface ErrorForDisplay { message: string; + + messageClassName?: string; sentry_link?: string; task_name?: string; task_id?: string; @@ -241,6 +243,7 @@ export interface ErrorForDisplay { error_line?: string; file_name?: string; task_trace?: string[]; + stacktrace?: string[]; } export interface AuthenticationParam { @@ -301,7 +304,7 @@ export interface JsonSchemaForm { export interface ProcessInstanceEventErrorDetail { id: number; message: string; - stacktrace: string; + stacktrace: string[]; task_line_contents?: string; task_line_number?: number; task_offset?: number; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index cae1e8802..dac849ac6 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -160,21 +160,16 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { if (eventErrorDetails) { const errorForDisplay: ErrorForDisplay = { message: eventErrorDetails.message, + messageClassName: 'failure-string', task_name: eventForModal.task_definition_name, task_id: eventForModal.task_definition_identifier, line_number: eventErrorDetails.task_line_number, error_line: eventErrorDetails.task_line_contents, task_trace: eventErrorDetails.task_trace, + stacktrace: eventErrorDetails.stacktrace, }; const errorChildren = childrenForErrorObject(errorForDisplay); - //
{eventErrorDetails.stacktrace}
- errorMessageTag = ( - <> -

{eventErrorDetails.message}

-
- {errorChildren} - - ); + errorMessageTag = <>{errorChildren}; } return ( { const errorObject: ProcessInstanceEventErrorDetail = { id: 0, - message: `ERROR: ${error.message}`, - stacktrace: '', + message: `ERROR retrieving error details: ${error.message}`, + stacktrace: [], }; setEventErrorDetails(errorObject); }, @@ -218,7 +213,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { const eventTypeCell = (logEntry: ProcessInstanceLogEntry) => { if ( - ['process_instance_error', 'task_error'].includes(logEntry.event_type) + ['process_instance_error', 'task_failed'].includes(logEntry.event_type) ) { const errorTitle = 'Event has an error'; const errorIcon = ( From 1b65a277c4e97507eafb7291ec933ca64d33e458 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 20 Apr 2023 11:49:34 -0400 Subject: [PATCH 06/11] properly save task and instance exceptions to the db and display in the frontend w/ burnettk --- .../models/process_instance.py | 4 - .../routes/process_instances_controller.py | 21 ++-- .../services/error_handling_service.py | 10 +- .../services/process_instance_processor.py | 40 ++----- .../process_instance_queue_service.py | 6 +- .../services/process_instance_service.py | 34 +++--- .../services/task_service.py | 36 ++++--- .../services/workflow_execution_service.py | 102 ++++++++++++------ .../src/components/ReactDiagramEditor.tsx | 15 +++ spiffworkflow-frontend/src/index.css | 4 + spiffworkflow-frontend/src/interfaces.ts | 1 + .../src/routes/ProcessInstanceShow.tsx | 4 + 12 files changed, 164 insertions(+), 113 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index 5b35df3e1..009a7486e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -205,7 +205,6 @@ class ProcessInstanceApi: next_task: Task | None, process_model_identifier: str, process_model_display_name: str, - completed_tasks: int, updated_at_in_seconds: int, ) -> None: """__init__.""" @@ -214,7 +213,6 @@ class ProcessInstanceApi: self.next_task = next_task # The next task that requires user input. 
self.process_model_identifier = process_model_identifier self.process_model_display_name = process_model_display_name - self.completed_tasks = completed_tasks self.updated_at_in_seconds = updated_at_in_seconds @@ -231,7 +229,6 @@ class ProcessInstanceApiSchema(Schema): "next_task", "process_model_identifier", "process_model_display_name", - "completed_tasks", "updated_at_in_seconds", ] unknown = INCLUDE @@ -248,7 +245,6 @@ class ProcessInstanceApiSchema(Schema): "next_task", "process_model_identifier", "process_model_display_name", - "completed_tasks", "updated_at_in_seconds", ] filtered_fields = {key: data[key] for key in keys} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 58acee749..b8217fa33 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -124,14 +124,12 @@ def process_instance_run( processor = None try: - processor = ProcessInstanceProcessor(process_instance) - processor.do_engine_steps(save=True) + processor = ProcessInstanceService.run_process_intance_with_processor(process_instance) except ( ApiError, ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError, ) as e: - # import pdb; pdb.set_trace() ErrorHandlingService.handle_error(process_instance, e) raise e except Exception as e: @@ -139,7 +137,6 @@ def process_instance_run( # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes. # we need to recurse through all last tasks if the last task is a call activity or subprocess. if processor is not None: - # import pdb; pdb.set_trace() task = processor.bpmn_process_instance.last_task raise ApiError.from_task( error_code="unknown_exception", @@ -152,11 +149,17 @@ def process_instance_run( if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: MessageService.correlate_all_message_instances() - process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor) - process_instance_data = processor.get_data() - process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) - process_instance_metadata["data"] = process_instance_data - return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json") + # for mypy + if processor is not None: + process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor) + process_instance_data = processor.get_data() + process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) + process_instance_metadata["data"] = process_instance_data + return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json") + + # FIXME: this should never happen currently but it'd be ideal to always do this + # currently though it does not return next task so it cannnot be used to take the user to the next human task + return make_response(jsonify(process_instance), 200) def process_instance_terminate( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py index 6696f3538..1ebc5202b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py @@ -19,10 +19,10 @@ class ErrorHandlingService: MESSAGE_NAME = "SystemErrorMessage" @classmethod - def handle_error(cls, process_instance: ProcessInstanceModel, error: Union[ApiError, Exception]) -> None: + def handle_error(cls, process_instance: ProcessInstanceModel, error: Exception) -> None: """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.""" process_model = ProcessModelService.get_process_model(process_instance.process_model_identifier) - cls._update_process_instance_in_database(process_instance, error, process_model.fault_or_suspend_on_exception) + cls._update_process_instance_in_database(process_instance, process_model.fault_or_suspend_on_exception) # Second, send a bpmn message out, but only if an exception notification address is provided # This will create a new Send Message with correlation keys on the recipients and the message @@ -35,9 +35,7 @@ class ErrorHandlingService: current_app.logger.error(e) @classmethod - def _update_process_instance_in_database(cls, process_instance: ProcessInstanceModel, error: Union[ApiError, Exception], fault_or_suspend_on_exception: str) -> None: - TaskService.add_event_to_process_instance(process_instance, ProcessInstanceEventType.process_instance_error.value, exception=error) - + def _update_process_instance_in_database(cls, process_instance: ProcessInstanceModel, fault_or_suspend_on_exception: str) -> None: # First, suspend or fault the instance if fault_or_suspend_on_exception == "suspend": cls._set_instance_status( @@ -55,7 +53,7 @@ class ErrorHandlingService: @staticmethod def _handle_system_notification( - error: Union[ApiError, Exception], + error: Exception, process_model: ProcessModelInfo, process_instance: ProcessInstanceModel, ) -> None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 1e2a42f33..c4961722e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -108,6 +108,7 @@ from spiffworkflow_backend.services.task_service import JsonDataDict from spiffworkflow_backend.services.task_service import TaskService from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.workflow_execution_service import ( + ExecutionStrategyNotConfiguredError, execution_strategy_named, ) from spiffworkflow_backend.services.workflow_execution_service import ( @@ -162,9 +163,10 @@ class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # ty script: str, context: Dict[str, Any], external_methods: Optional[Dict[str, Any]] = None, - ) -> None: + ) -> bool: super().execute(script, context, external_methods) self._last_result = context + return True def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: return {} @@ -217,7 +219,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) script: str, context: Dict[str, Any], external_methods: Optional[Dict[str, Any]] = None, - ) -> None: + ) -> bool: # TODO: once integrated look at the tests that fail without Box # context is task.data Box.convert_to_box(context) @@ -239,6 +241,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) # the 
task data needs to be updated with the current state so data references can be resolved properly. # the state will be removed later once the task is completed. context.update(self.state) + return True def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: keys_to_filter = self.non_user_defined_keys @@ -318,13 +321,7 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore # This will overwrite the standard builtins default_globals.update(safe_globals) default_globals["__builtins__"]["__import__"] = _import - environment = CustomScriptEngineEnvironment(default_globals) - - # right now spiff is executing script tasks on ready so doing this - # so we know when something fails and we can save it to our database. - self.failing_spiff_task: Optional[SpiffTask] = None - super().__init__(environment=environment) def __get_augment_methods(self, task: Optional[SpiffTask]) -> Dict[str, Callable]: @@ -351,7 +348,6 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore expression: str, external_methods: Optional[dict[str, Any]] = None, ) -> Any: - """Evaluate.""" return self._evaluate(expression, task.data, task, external_methods) def _evaluate( @@ -361,7 +357,6 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore task: Optional[SpiffTask] = None, external_methods: Optional[Dict[str, Any]] = None, ) -> Any: - """_evaluate.""" methods = self.__get_augment_methods(task) if external_methods: methods.update(external_methods) @@ -381,17 +376,15 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore exception=exception, ) from exception - def execute(self, task: SpiffTask, script: str, external_methods: Any = None) -> None: - """Execute.""" + def execute(self, task: SpiffTask, script: str, external_methods: Any = None) -> bool: try: # reset failing task just in case - self.failing_spiff_task = None methods = self.__get_augment_methods(task) if external_methods: methods.update(external_methods) super().execute(task, script, methods) + return True except WorkflowException as e: - self.failing_spiff_task = task raise e except Exception as e: raise self.create_task_exec_exception(task, script, e) from e @@ -402,7 +395,6 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore operation_params: Dict[str, Any], task_data: Dict[str, Any], ) -> Any: - """CallService.""" return ServiceTaskDelegate.call_connector(operation_name, operation_params, task_data) @@ -1124,14 +1116,10 @@ class ProcessInstanceProcessor: """Saves the current state of this processor to the database.""" self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION - complete_states = [TaskState.CANCELLED, TaskState.COMPLETED] - user_tasks = list(self.get_all_user_tasks()) self.process_instance_model.status = self.get_status().value current_app.logger.debug( f"the_status: {self.process_instance_model.status} for instance {self.process_instance_model.id}" ) - self.process_instance_model.total_tasks = len(user_tasks) - self.process_instance_model.completed_tasks = sum(1 for t in user_tasks if t.state in complete_states) if self.process_instance_model.start_in_seconds is None: self.process_instance_model.start_in_seconds = round(time.time()) @@ -1693,6 +1681,8 @@ class ProcessInstanceProcessor: if execution_strategy_name is None: execution_strategy_name = current_app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"] + if execution_strategy_name is None: + raise 
ExecutionStrategyNotConfiguredError("SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB has not been set") execution_strategy = execution_strategy_named(execution_strategy_name, task_model_delegate) execution_service = WorkflowExecutionService( @@ -1702,16 +1692,7 @@ class ProcessInstanceProcessor: self._script_engine.environment.finalize_result, self.save, ) - try: - execution_service.run(exit_at, save) - finally: - # clear out failling spiff tasks here since the ProcessInstanceProcessor creates an instance of the - # script engine on a class variable. - if ( - hasattr(self._script_engine, "failing_spiff_task") - and self._script_engine.failing_spiff_task is not None - ): - self._script_engine.failing_spiff_task = None + execution_service.run(exit_at, save) @classmethod def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]: @@ -1932,7 +1913,6 @@ class ProcessInstanceProcessor: return [t for t in all_tasks if not self.bpmn_process_instance._is_engine_task(t.task_spec)] def get_all_completed_tasks(self) -> list[SpiffTask]: - """Get_all_completed_tasks.""" all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) return [ t diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index 59915eb1b..587b6e80c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -1,5 +1,6 @@ import contextlib import time +from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.services.task_service import TaskService from typing import Generator @@ -99,7 +100,10 @@ class ProcessInstanceQueueService: except Exception as ex: process_instance.status = ProcessInstanceStatus.error.value db.session.add(process_instance) - TaskService.add_event_to_process_instance(process_instance, ProcessInstanceEventType.process_instance_error.value, exception=ex) + # these events are handled in the WorkflowExecutionService. + # that is, we don't need to add error_detail records here, etc. 
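# A rough, self-contained sketch of the guard introduced on the next line, using
# hypothetical stand-ins (AlreadyLoggedError, record_event) rather than the
# project's classes: errors raised by the execution layer are assumed to be
# recorded at their source, so only other exceptions get the generic
# process_instance_error event before the re-raise.
class AlreadyLoggedError(Exception):
    pass

event_log: list[str] = []

def record_event(event_type: str, exception: Exception) -> None:
    event_log.append(f"{event_type}: {exception}")

def run_and_log(work) -> None:
    try:
        work()
    except Exception as ex:
        if not isinstance(ex, AlreadyLoggedError):
            record_event("process_instance_error", ex)
        raise

def boom() -> None:
    raise RuntimeError("unexpected failure")

try:
    run_and_log(boom)
except RuntimeError:
    pass
print(event_log)  # ['process_instance_error: unexpected failure']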
+ if not isinstance(ex, WorkflowExecutionServiceError): + TaskService.add_event_to_process_instance(process_instance, ProcessInstanceEventType.process_instance_error.value, exception=ex) db.session.commit() raise ex finally: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index c31bb447d..aba946c7e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -113,20 +113,9 @@ class ProcessInstanceService: .all() ) for process_instance in records: + current_app.logger.info(f"Processing process_instance {process_instance.id}") try: - current_app.logger.info(f"Processing process_instance {process_instance.id}") - with ProcessInstanceQueueService.dequeued(process_instance): - processor = ProcessInstanceProcessor(process_instance) - if cls.can_optimistically_skip(processor, status_value): - current_app.logger.info(f"Optimistically skipped process_instance {process_instance.id}") - continue - - db.session.refresh(process_instance) - if process_instance.status == status_value: - execution_strategy_name = current_app.config[ - "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND" - ] - processor.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name) + cls.run_process_intance_with_processor(process_instance, status_value=status_value) except ProcessInstanceIsAlreadyLockedError: continue except Exception as e: @@ -137,6 +126,24 @@ class ProcessInstanceService: ) current_app.logger.error(error_message) + @classmethod + def run_process_intance_with_processor(cls, process_instance: ProcessInstanceModel, status_value: Optional[str] = None) -> Optional[ProcessInstanceProcessor]: + processor = None + with ProcessInstanceQueueService.dequeued(process_instance): + processor = ProcessInstanceProcessor(process_instance) + if status_value and cls.can_optimistically_skip(processor, status_value): + current_app.logger.info(f"Optimistically skipped process_instance {process_instance.id}") + return None + + db.session.refresh(process_instance) + if status_value is None or process_instance.status == status_value: + execution_strategy_name = current_app.config[ + "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND" + ] + processor.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name) + + return processor + @staticmethod def processor_to_process_instance_api( processor: ProcessInstanceProcessor, next_task: None = None @@ -155,7 +162,6 @@ class ProcessInstanceService: next_task=None, process_model_identifier=processor.process_model_identifier, process_model_display_name=processor.process_model_display_name, - completed_tasks=processor.process_instance_model.completed_tasks, updated_at_in_seconds=processor.process_instance_model.updated_at_in_seconds, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 73baa7389..8e5a95cdd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -3,6 +3,7 @@ import json from spiffworkflow_backend.exceptions.api_error import ApiError from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore from flask import g +from 
spiffworkflow_backend.models import process_instance_error_detail from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel import traceback import time @@ -117,7 +118,6 @@ class TaskService: def update_task_model_with_spiff_task( self, spiff_task: SpiffTask, - task_failed: bool = False, start_and_end_times: Optional[StartAndEndTimes] = None, ) -> TaskModel: new_bpmn_process = None @@ -158,20 +158,11 @@ class TaskService: task_model.start_in_seconds = start_and_end_times["start_in_seconds"] task_model.end_in_seconds = start_and_end_times["end_in_seconds"] - if task_model.state == "COMPLETED" or task_failed: + # let failed tasks raise and we will log the event then + if task_model.state == "COMPLETED": event_type = ProcessInstanceEventType.task_completed.value - if task_failed or task_model.state == TaskState.ERROR: - event_type = ProcessInstanceEventType.task_failed.value - - # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete - # which script tasks execute when READY. timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time() - process_instance_event = ProcessInstanceEventModel( - task_guid=task_model.guid, - process_instance_id=self.process_instance.id, - event_type=event_type, - timestamp=timestamp, - ) + process_instance_event, _process_instance_error_detail = TaskService.add_event_to_process_instance(self.process_instance, event_type, task_guid=task_model.guid, timestamp=timestamp, add_to_db_session=False) self.process_instance_events[task_model.guid] = process_instance_event self.update_bpmn_process(spiff_task.workflow, bpmn_process) @@ -607,16 +598,24 @@ class TaskService: task_guid: Optional[str] = None, user_id: Optional[int] = None, exception: Optional[Exception] = None, - ) -> None: + timestamp: Optional[float] = None, + add_to_db_session: Optional[bool] = True, + ) -> Tuple[ProcessInstanceEventModel, Optional[ProcessInstanceErrorDetailModel]]: if user_id is None and hasattr(g, "user") and g.user: user_id = g.user.id + if timestamp is None: + timestamp = time.time() + process_instance_event = ProcessInstanceEventModel( - process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id + process_instance_id=process_instance.id, event_type=event_type, timestamp=timestamp, user_id=user_id ) if task_guid: process_instance_event.task_guid = task_guid - db.session.add(process_instance_event) + if add_to_db_session: + db.session.add(process_instance_event) + + process_instance_error_detail = None if exception is not None: # truncate to avoid database errors on large values. We observed that text in mysql is 65K. 
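# A hedged sketch of what this step does, with hypothetical stand-ins
# (TaskAwareError for WorkflowTaskException, a plain dict for the error-detail
# model): truncate the message to fit the 1024-char column, keep the stacktrace
# as a list of lines, and copy task location fields only when the exception
# actually carries them.
import traceback
from typing import Any, Optional

class TaskAwareError(Exception):
    def __init__(self, message: str, line_number: Optional[int] = None,
                 error_line: Optional[str] = None) -> None:
        super().__init__(message)
        self.line_number = line_number
        self.error_line = error_line

def build_error_detail(exception: Exception) -> dict[str, Any]:
    detail: dict[str, Any] = {
        "message": str(exception)[0:1023],
        # call while handling the exception so format_exc() sees it
        "stacktrace": traceback.format_exc().split("\n"),
        "task_line_number": None,
        "task_line_contents": None,
    }
    if isinstance(exception, TaskAwareError):
        detail["task_line_number"] = exception.line_number
        detail["task_line_contents"] = exception.error_line
    return detail

try:
    raise TaskAwareError("script task failed", line_number=3, error_line="x = 1 / 0")
except TaskAwareError as err:
    print(build_error_detail(err)["task_line_number"])  # 3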
stacktrace = traceback.format_exc().split("\n") @@ -641,4 +640,7 @@ class TaskService: task_trace=task_trace, task_offset=task_offset, ) - db.session.add(process_instance_error_detail) + + if add_to_db_session: + db.session.add(process_instance_error_detail) + return (process_instance_event, process_instance_error_detail) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 8ccd5456c..cabc67356 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,5 +1,9 @@ +from __future__ import annotations import copy import time +from abc import abstractmethod +from typing import Type, TypeVar +from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from typing import Callable from typing import Optional @@ -28,21 +32,69 @@ from spiffworkflow_backend.services.task_service import StartAndEndTimes from spiffworkflow_backend.services.task_service import TaskService + + +class WorkflowExecutionServiceError(WorkflowTaskException): # type: ignore + @classmethod + def from_workflow_task_exception( + cls, + workflow_task_exception: WorkflowTaskException, + ) -> WorkflowExecutionServiceError: + return cls( + error_msg=str(workflow_task_exception), + task=workflow_task_exception.task, + exception=workflow_task_exception, + line_number=workflow_task_exception.line_number, + offset=workflow_task_exception.offset, + error_line=workflow_task_exception.error_line, + ) + + +class ExecutionStrategyNotConfiguredError(Exception): + pass + + class EngineStepDelegate: """Interface of sorts for a concrete engine step delegate.""" + @abstractmethod def will_complete_task(self, spiff_task: SpiffTask) -> None: pass + @abstractmethod def did_complete_task(self, spiff_task: SpiffTask) -> None: pass + @abstractmethod def save(self, bpmn_process_instance: BpmnWorkflow, commit: bool = False) -> None: pass + @abstractmethod def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: pass + @abstractmethod + def on_exception(self, bpmn_process_instance: BpmnWorkflow) -> None: + pass + + +class ExecutionStrategy: + """Interface of sorts for a concrete execution strategy.""" + + def __init__(self, delegate: EngineStepDelegate): + """__init__.""" + self.delegate = delegate + + @abstractmethod + def spiff_run(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: + pass + + def on_exception(self, bpmn_process_instance: BpmnWorkflow) -> None: + self.delegate.on_exception(bpmn_process_instance) + + def save(self, bpmn_process_instance: BpmnWorkflow) -> None: + self.delegate.save(bpmn_process_instance) + class TaskModelSavingDelegate(EngineStepDelegate): """Engine step delegate that takes care of saving a task model to the database. 
@@ -104,29 +156,12 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.secondary_engine_step_delegate.did_complete_task(spiff_task) def save(self, bpmn_process_instance: BpmnWorkflow, _commit: bool = True) -> None: - script_engine = bpmn_process_instance.script_engine - if hasattr(script_engine, "failing_spiff_task") and script_engine.failing_spiff_task is not None: - failing_spiff_task = script_engine.failing_spiff_task - self.task_service.update_task_model_with_spiff_task(failing_spiff_task, task_failed=True) - self.task_service.process_spiff_task_parent_subprocess_tasks(failing_spiff_task) - self.task_service.process_spiff_task_children(failing_spiff_task) - self.task_service.save_objects_to_database() if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.save(bpmn_process_instance, commit=False) db.session.commit() - def _add_children(self, spiff_task: SpiffTask) -> None: - for child_spiff_task in spiff_task.children: - self.spiff_tasks_to_process.add(child_spiff_task.id) - self._add_children(child_spiff_task) - - def _add_parents(self, spiff_task: SpiffTask) -> None: - if spiff_task.parent and spiff_task.parent.task_spec.name != "Root": - self.spiff_tasks_to_process.add(spiff_task.parent.id) - self._add_parents(spiff_task.parent) - def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: if self._should_update_task_model(): # NOTE: process-all-tasks: All tests pass with this but it's less efficient and would be nice to replace @@ -187,6 +222,19 @@ class TaskModelSavingDelegate(EngineStepDelegate): # self.task_service.process_spiff_task_children(self.last_completed_spiff_task) # self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task) + def on_exception(self, bpmn_process_instance: BpmnWorkflow) -> None: + self.after_engine_steps(bpmn_process_instance) + + def _add_children(self, spiff_task: SpiffTask) -> None: + for child_spiff_task in spiff_task.children: + self.spiff_tasks_to_process.add(child_spiff_task.id) + self._add_children(child_spiff_task) + + def _add_parents(self, spiff_task: SpiffTask) -> None: + if spiff_task.parent and spiff_task.parent.task_spec.name != "Root": + self.spiff_tasks_to_process.add(spiff_task.parent.id) + self._add_parents(spiff_task.parent) + def _should_update_task_model(self) -> bool: """We need to figure out if we have previously save task info on this process intance. @@ -196,20 +244,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): return True -class ExecutionStrategy: - """Interface of sorts for a concrete execution strategy.""" - - def __init__(self, delegate: EngineStepDelegate): - """__init__.""" - self.delegate = delegate - - def spiff_run(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: - pass - - def save(self, bpmn_process_instance: BpmnWorkflow) -> None: - self.delegate.save(bpmn_process_instance) - - class GreedyExecutionStrategy(ExecutionStrategy): """The common execution strategy. 
This will greedily run all engine steps without stopping.""" @@ -329,8 +363,12 @@ class WorkflowExecutionService: self.process_bpmn_messages() self.queue_waiting_receive_messages() + except WorkflowTaskException as wte: + TaskService.add_event_to_process_instance(self.process_instance_model, ProcessInstanceEventType.task_failed.value, exception=wte, task_guid=str(wte.task.id)) + self.execution_strategy.on_exception(self.bpmn_process_instance) + raise WorkflowExecutionServiceError.from_workflow_task_exception(wte) except SpiffWorkflowException as swe: - TaskService.add_event_to_process_instance(self.process_instance_model, ProcessInstanceEventType.task_failed.value, exception=swe, task_guid=str(swe.task.id)) + self.execution_strategy.on_exception(self.bpmn_process_instance) raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe finally: diff --git a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx index 9b9307f82..6cf486ede 100644 --- a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx +++ b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx @@ -69,6 +69,7 @@ type OwnProps = { readyOrWaitingProcessInstanceTasks?: Task[] | null; completedProcessInstanceTasks?: Task[] | null; cancelledProcessInstanceTasks?: Task[] | null; + erroredProcessInstanceTasks?: Task[] | null; saveDiagram?: (..._args: any[]) => any; onDeleteFile?: (..._args: any[]) => any; isPrimaryFile?: boolean; @@ -96,6 +97,7 @@ export default function ReactDiagramEditor({ readyOrWaitingProcessInstanceTasks, completedProcessInstanceTasks, cancelledProcessInstanceTasks, + erroredProcessInstanceTasks, saveDiagram, onDeleteFile, isPrimaryFile, @@ -457,6 +459,19 @@ export default function ReactDiagramEditor({ ); }); } + if (erroredProcessInstanceTasks) { + const bpmnProcessIdentifiers = getBpmnProcessIdentifiers( + canvas.getRootElement() + ); + erroredProcessInstanceTasks.forEach((erroredTask) => { + highlightBpmnIoElement( + canvas, + erroredTask, + 'errored-task-highlight', + bpmnProcessIdentifiers + ); + }); + } } function displayDiagram( diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index d98187cd3..78c034e51 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -146,6 +146,10 @@ code { fill: blue !important; opacity: .2; } +.errored-task-highlight:not(.djs-connection) .djs-visual > :nth-child(1) { + fill: red !important; + opacity: .2; +} .accordion-item-label { vertical-align: middle; diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index f0b2f48cf..022469e6f 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -59,6 +59,7 @@ export interface TaskIds { completed: Task[]; readyOrWaiting: Task[]; cancelled: Task[]; + errored: Task[]; } export interface ProcessInstanceTask { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index c294ae486..263631eab 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -235,6 +235,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { completed: [], readyOrWaiting: [], cancelled: [], + errored: [], }; if (tasks) { tasks.forEach(function getUserTasksElement(task: Task) { @@ -244,6 +245,8 @@ export default function 
ProcessInstanceShow({ variant }: OwnProps) { taskIds.readyOrWaiting.push(task); } else if (task.state === 'CANCELLED') { taskIds.cancelled.push(task); + } else if (task.state === 'ERROR') { + taskIds.errored.push(task); } return null; }); @@ -1159,6 +1162,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { readyOrWaitingProcessInstanceTasks={taskIds.readyOrWaiting} completedProcessInstanceTasks={taskIds.completed} cancelledProcessInstanceTasks={taskIds.cancelled} + erroredProcessInstanceTasks={taskIds.errored} diagramType="readonly" onElementClick={handleClickedDiagramTask} /> From 48ff72b00ba0efccea334811daf386240e36b0c0 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 20 Apr 2023 12:05:35 -0400 Subject: [PATCH 07/11] pass full tasks list to react diagram editor for coloring w/ burnettk --- .../src/components/ErrorDisplay.tsx | 23 +++++- .../src/components/ReactDiagramEditor.tsx | 81 ++++++------------- spiffworkflow-frontend/src/interfaces.ts | 8 +- .../src/routes/ProcessInstanceLogList.tsx | 21 +++-- .../src/routes/ProcessInstanceShow.tsx | 31 +------ 5 files changed, 56 insertions(+), 108 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx index 40abec5d5..67e6e18dc 100644 --- a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx +++ b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx @@ -1,6 +1,10 @@ import { Notification } from './Notification'; import useAPIError from '../hooks/UseApiError'; -import { ErrorForDisplay } from '../interfaces'; +import { + ErrorForDisplay, + ProcessInstanceEventErrorDetail, + ProcessInstanceLogEntry, +} from '../interfaces'; function errorDetailDisplay( errorObject: any, @@ -19,6 +23,23 @@ function errorDetailDisplay( return null; } +export const errorForDisplayFromProcessInstanceErrorDetail = ( + processInstanceEvent: ProcessInstanceLogEntry, + processInstanceErrorEventDetail: ProcessInstanceEventErrorDetail +) => { + const errorForDisplay: ErrorForDisplay = { + message: processInstanceErrorEventDetail.message, + messageClassName: 'failure-string', + task_name: processInstanceEvent.task_definition_name, + task_id: processInstanceEvent.task_definition_identifier, + line_number: processInstanceErrorEventDetail.task_line_number, + error_line: processInstanceErrorEventDetail.task_line_contents, + task_trace: processInstanceErrorEventDetail.task_trace, + stacktrace: processInstanceErrorEventDetail.stacktrace, + }; + return errorForDisplay; +}; + export const childrenForErrorObject = (errorObject: ErrorForDisplay) => { let sentryLinkTag = null; if (errorObject.sentry_link) { diff --git a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx index 6cf486ede..ad583d92c 100644 --- a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx +++ b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx @@ -66,10 +66,7 @@ import { usePermissionFetcher } from '../hooks/PermissionService'; type OwnProps = { processModelId: string; diagramType: string; - readyOrWaitingProcessInstanceTasks?: Task[] | null; - completedProcessInstanceTasks?: Task[] | null; - cancelledProcessInstanceTasks?: Task[] | null; - erroredProcessInstanceTasks?: Task[] | null; + tasks?: Task[] | null; saveDiagram?: (..._args: any[]) => any; onDeleteFile?: (..._args: any[]) => any; isPrimaryFile?: boolean; @@ -94,10 +91,7 @@ type OwnProps = { export default function ReactDiagramEditor({ processModelId, 
diagramType, - readyOrWaitingProcessInstanceTasks, - completedProcessInstanceTasks, - cancelledProcessInstanceTasks, - erroredProcessInstanceTasks, + tasks, saveDiagram, onDeleteFile, isPrimaryFile, @@ -420,56 +414,29 @@ export default function ReactDiagramEditor({ // highlighting a field // Option 3 at: // https://github.com/bpmn-io/bpmn-js-examples/tree/master/colors - if (readyOrWaitingProcessInstanceTasks) { + if (tasks) { const bpmnProcessIdentifiers = getBpmnProcessIdentifiers( canvas.getRootElement() ); - readyOrWaitingProcessInstanceTasks.forEach((readyOrWaitingBpmnTask) => { - highlightBpmnIoElement( - canvas, - readyOrWaitingBpmnTask, - 'active-task-highlight', - bpmnProcessIdentifiers - ); - }); - } - if (completedProcessInstanceTasks) { - const bpmnProcessIdentifiers = getBpmnProcessIdentifiers( - canvas.getRootElement() - ); - completedProcessInstanceTasks.forEach((completedTask) => { - highlightBpmnIoElement( - canvas, - completedTask, - 'completed-task-highlight', - bpmnProcessIdentifiers - ); - }); - } - if (cancelledProcessInstanceTasks) { - const bpmnProcessIdentifiers = getBpmnProcessIdentifiers( - canvas.getRootElement() - ); - cancelledProcessInstanceTasks.forEach((cancelledTask) => { - highlightBpmnIoElement( - canvas, - cancelledTask, - 'cancelled-task-highlight', - bpmnProcessIdentifiers - ); - }); - } - if (erroredProcessInstanceTasks) { - const bpmnProcessIdentifiers = getBpmnProcessIdentifiers( - canvas.getRootElement() - ); - erroredProcessInstanceTasks.forEach((erroredTask) => { - highlightBpmnIoElement( - canvas, - erroredTask, - 'errored-task-highlight', - bpmnProcessIdentifiers - ); + tasks.forEach((task: Task) => { + let className = ''; + if (task.state === 'COMPLETED') { + className = 'completed-task-highlight'; + } else if (task.state === 'READY' || task.state === 'WAITING') { + className = 'active-task-highlight'; + } else if (task.state === 'CANCELLED') { + className = 'cancelled-task-highlight'; + } else if (task.state === 'ERROR') { + className = 'errored-task-highlight'; + } + if (className) { + highlightBpmnIoElement( + canvas, + task, + className, + bpmnProcessIdentifiers + ); + } }); } } @@ -549,10 +516,8 @@ export default function ReactDiagramEditor({ diagramType, diagramXML, diagramXMLString, - readyOrWaitingProcessInstanceTasks, - completedProcessInstanceTasks, - cancelledProcessInstanceTasks, fileName, + tasks, performingXmlUpdates, processModelId, url, diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 022469e6f..bdbc9251e 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -37,6 +37,7 @@ export interface EventDefinition { message_var?: string; } +// TODO: merge with ProcessInstanceTask export interface Task { id: number; guid: string; @@ -55,13 +56,6 @@ export interface Task { event_definition?: EventDefinition; } -export interface TaskIds { - completed: Task[]; - readyOrWaiting: Task[]; - cancelled: Task[]; - errored: Task[]; -} - export interface ProcessInstanceTask { id: string; task_id: string; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index dac849ac6..02100e81a 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -32,14 +32,16 @@ import { import HttpService from '../services/HttpService'; import { useUriListForPermissions } from 
'../hooks/UriListForPermissions'; import { - ErrorForDisplay, PermissionsToCheck, ProcessInstanceEventErrorDetail, ProcessInstanceLogEntry, } from '../interfaces'; import Filters from '../components/Filters'; import { usePermissionFetcher } from '../hooks/PermissionService'; -import { childrenForErrorObject } from '../components/ErrorDisplay'; +import { + childrenForErrorObject, + errorForDisplayFromProcessInstanceErrorDetail, +} from '../components/ErrorDisplay'; type OwnProps = { variant: string; @@ -158,17 +160,12 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { ); if (eventErrorDetails) { - const errorForDisplay: ErrorForDisplay = { - message: eventErrorDetails.message, - messageClassName: 'failure-string', - task_name: eventForModal.task_definition_name, - task_id: eventForModal.task_definition_identifier, - line_number: eventErrorDetails.task_line_number, - error_line: eventErrorDetails.task_line_contents, - task_trace: eventErrorDetails.task_trace, - stacktrace: eventErrorDetails.stacktrace, - }; + const errorForDisplay = errorForDisplayFromProcessInstanceErrorDetail( + eventForModal, + eventErrorDetails + ); const errorChildren = childrenForErrorObject(errorForDisplay); + // eslint-disable-next-line react/jsx-no-useless-fragment errorMessageTag = <>{errorChildren}; } return ( diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 263631eab..170673bf0 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -47,7 +47,6 @@ import { ProcessInstanceMetadata, Task, TaskDefinitionPropertiesJson, - TaskIds, } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; import ProcessInstanceClass from '../classes/ProcessInstanceClass'; @@ -230,30 +229,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }); }; - const getTaskIds = () => { - const taskIds: TaskIds = { - completed: [], - readyOrWaiting: [], - cancelled: [], - errored: [], - }; - if (tasks) { - tasks.forEach(function getUserTasksElement(task: Task) { - if (task.state === 'COMPLETED') { - taskIds.completed.push(task); - } else if (task.state === 'READY' || task.state === 'WAITING') { - taskIds.readyOrWaiting.push(task); - } else if (task.state === 'CANCELLED') { - taskIds.cancelled.push(task); - } else if (task.state === 'ERROR') { - taskIds.errored.push(task); - } - return null; - }); - } - return taskIds; - }; - const currentToTaskGuid = () => { if (taskToTimeTravelTo) { return taskToTimeTravelTo.guid; @@ -1101,7 +1076,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; if (processInstance && (tasks || tasksCallHadError)) { - const taskIds = getTaskIds(); const processModelId = unModifyProcessIdentifierForPathParam( params.process_model_id ? 
params.process_model_id : '' ); @@ -1159,10 +1133,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { processModelId={processModelId || ''} diagramXML={processInstance.bpmn_xml_file_contents || ''} fileName={processInstance.bpmn_xml_file_contents || ''} - readyOrWaitingProcessInstanceTasks={taskIds.readyOrWaiting} - completedProcessInstanceTasks={taskIds.completed} - cancelledProcessInstanceTasks={taskIds.cancelled} - erroredProcessInstanceTasks={taskIds.errored} + tasks={tasks} diagramType="readonly" onElementClick={handleClickedDiagramTask} /> From f082f0966c4b5a7629692a4d1cf12a89752daa59 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 20 Apr 2023 13:01:05 -0400 Subject: [PATCH 08/11] flake8 and mypy fixes w/ burnettk --- .../models/process_instance_error_detail.py | 8 ++++-- .../models/process_instance_event.py | 2 +- .../process_instance_events_controller.py | 2 +- .../services/error_handling_service.py | 9 ++---- .../services/process_instance_processor.py | 17 ++++------- .../process_instance_queue_service.py | 10 ++++--- .../services/process_instance_service.py | 4 ++- .../services/task_service.py | 23 +++++++++------ .../services/workflow_execution_service.py | 28 +++++++++---------- .../unit/test_error_handling_service.py | 2 +- 10 files changed, 55 insertions(+), 50 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py index 743cb5fd5..804388bbb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_error_detail.py @@ -1,9 +1,11 @@ from dataclasses import dataclass from typing import Optional -from sqlalchemy.orm import relationship -from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel + from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + from spiffworkflow_backend.models.db import db +from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel @dataclass @@ -12,7 +14,7 @@ class ProcessInstanceErrorDetailModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) process_instance_event_id: int = db.Column(ForeignKey("process_instance_event.id"), nullable=False, index=True) - process_instance_event = relationship('ProcessInstanceEventModel') # type: ignore + process_instance_event = relationship("ProcessInstanceEventModel") # type: ignore message: str = db.Column(db.String(1024), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py index 0b3738169..684c9e086 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py @@ -1,9 +1,9 @@ from __future__ import annotations -from sqlalchemy.orm import relationship from typing import Any from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship from sqlalchemy.orm import validates from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py index d829434e7..5831632b6 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instance_events_controller.py @@ -1,11 +1,11 @@ from typing import Optional -from spiffworkflow_backend.exceptions.api_error import ApiError import flask.wrappers from flask import jsonify from flask import make_response from sqlalchemy import and_ +from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py index 1ebc5202b..6d0951043 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py @@ -1,11 +1,6 @@ -from typing import Union -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType -from spiffworkflow_backend.services.task_service import TaskService - from flask import current_app from flask import g -from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -35,7 +30,9 @@ class ErrorHandlingService: current_app.logger.error(e) @classmethod - def _update_process_instance_in_database(cls, process_instance: ProcessInstanceModel, fault_or_suspend_on_exception: str) -> None: + def _update_process_instance_in_database( + cls, process_instance: ProcessInstanceModel, fault_or_suspend_on_exception: str + ) -> None: # First, suspend or fault the instance if fault_or_suspend_on_exception == "suspend": cls._set_instance_status( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index c4961722e..5e63fd099 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,10 +1,7 @@ """Process_instance_processor.""" - # TODO: clean up this service for a clear distinction between it and the process_instance_service # where this points to the pi service -import traceback import _strptime # type: ignore -from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel import copy import decimal import json @@ -29,7 +26,6 @@ from uuid import UUID import dateparser import pytz from flask import current_app -from flask import g from lxml import etree # type: ignore from lxml.etree import XMLSyntaxError # type: ignore from RestrictedPython import safe_globals # type: ignore @@ -80,7 +76,6 @@ from spiffworkflow_backend.models.message_instance_correlation import ( ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel from spiffworkflow_backend.models.process_instance_event 
import ProcessInstanceEventType from spiffworkflow_backend.models.process_instance_metadata import ( ProcessInstanceMetadataModel, @@ -107,10 +102,8 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService from spiffworkflow_backend.services.task_service import JsonDataDict from spiffworkflow_backend.services.task_service import TaskService from spiffworkflow_backend.services.user_service import UserService -from spiffworkflow_backend.services.workflow_execution_service import ( - ExecutionStrategyNotConfiguredError, - execution_strategy_named, -) +from spiffworkflow_backend.services.workflow_execution_service import execution_strategy_named +from spiffworkflow_backend.services.workflow_execution_service import ExecutionStrategyNotConfiguredError from spiffworkflow_backend.services.workflow_execution_service import ( TaskModelSavingDelegate, ) @@ -228,6 +221,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) self.state.update(context) try: exec(script, self.state) # noqa + return True finally: # since the task data is not directly mutated when the script executes, need to determine which keys # have been deleted from the environment and remove them from task data if present. @@ -241,7 +235,6 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) # the task data needs to be updated with the current state so data references can be resolved properly. # the state will be removed later once the task is completed. context.update(self.state) - return True def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: keys_to_filter = self.non_user_defined_keys @@ -1682,7 +1675,9 @@ class ProcessInstanceProcessor: if execution_strategy_name is None: execution_strategy_name = current_app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"] if execution_strategy_name is None: - raise ExecutionStrategyNotConfiguredError("SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB has not been set") + raise ExecutionStrategyNotConfiguredError( + "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB has not been set" + ) execution_strategy = execution_strategy_named(execution_strategy_name, task_model_delegate) execution_service = WorkflowExecutionService( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index 587b6e80c..31e7d7254 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -1,8 +1,5 @@ import contextlib import time -from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType -from spiffworkflow_backend.services.task_service import TaskService from typing import Generator from typing import List from typing import Optional @@ -10,12 +7,15 @@ from typing import Optional from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_instance_queue import ( 
ProcessInstanceQueueModel, ) from spiffworkflow_backend.services.process_instance_lock_service import ( ProcessInstanceLockService, ) +from spiffworkflow_backend.services.task_service import TaskService +from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError class ProcessInstanceIsNotEnqueuedError(Exception): @@ -103,7 +103,9 @@ class ProcessInstanceQueueService: # these events are handled in the WorkflowExecutionService. # that is, we don't need to add error_detail records here, etc. if not isinstance(ex, WorkflowExecutionServiceError): - TaskService.add_event_to_process_instance(process_instance, ProcessInstanceEventType.process_instance_error.value, exception=ex) + TaskService.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.process_instance_error.value, exception=ex + ) db.session.commit() raise ex finally: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index aba946c7e..3a6111d94 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -127,7 +127,9 @@ class ProcessInstanceService: current_app.logger.error(error_message) @classmethod - def run_process_intance_with_processor(cls, process_instance: ProcessInstanceModel, status_value: Optional[str] = None) -> Optional[ProcessInstanceProcessor]: + def run_process_intance_with_processor( + cls, process_instance: ProcessInstanceModel, status_value: Optional[str] = None + ) -> Optional[ProcessInstanceProcessor]: processor = None with ProcessInstanceQueueService.dequeued(process_instance): processor = ProcessInstanceProcessor(process_instance) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 8e5a95cdd..27d56d358 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -1,12 +1,7 @@ import copy import json -from spiffworkflow_backend.exceptions.api_error import ApiError -from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore -from flask import g -from spiffworkflow_backend.models import process_instance_error_detail -from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel -import traceback import time +import traceback from hashlib import sha256 from typing import Optional from typing import Tuple @@ -14,19 +9,23 @@ from typing import TypedDict from uuid import UUID from flask import current_app +from flask import g from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer +from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert +from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process import 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py
index 8e5a95cdd..27d56d358 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py
@@ -1,12 +1,7 @@
 import copy
 import json
-from spiffworkflow_backend.exceptions.api_error import ApiError
-from SpiffWorkflow.exceptions import WorkflowTaskException  # type: ignore
-from flask import g
-from spiffworkflow_backend.models import process_instance_error_detail
-from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
-import traceback
 import time
+import traceback
 from hashlib import sha256
 from typing import Optional
 from typing import Tuple
@@ -14,19 +9,23 @@ from typing import TypedDict
 from uuid import UUID

 from flask import current_app
+from flask import g
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
+from SpiffWorkflow.exceptions import WorkflowTaskException  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.task import TaskStateNames
 from sqlalchemy.dialects.mysql import insert as mysql_insert
 from sqlalchemy.dialects.postgresql import insert as postgres_insert

+from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.json_data import JsonDataModel  # noqa: F401
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
 from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
 from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
@@ -162,7 +161,13 @@ class TaskService:
             if task_model.state == "COMPLETED":
                 event_type = ProcessInstanceEventType.task_completed.value
                 timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time()
-                process_instance_event, _process_instance_error_detail = TaskService.add_event_to_process_instance(self.process_instance, event_type, task_guid=task_model.guid, timestamp=timestamp, add_to_db_session=False)
+                process_instance_event, _process_instance_error_detail = TaskService.add_event_to_process_instance(
+                    self.process_instance,
+                    event_type,
+                    task_guid=task_model.guid,
+                    timestamp=timestamp,
+                    add_to_db_session=False,
+                )
                 self.process_instance_events[task_model.guid] = process_instance_event

         self.update_bpmn_process(spiff_task.workflow, bpmn_process)
@@ -625,7 +630,9 @@ class TaskService:
         task_line_contents = None
         task_trace = None
         task_offset = None
-        if isinstance(exception, WorkflowTaskException) or (isinstance(exception, ApiError) and exception.error_code == 'task_error'):
+        if isinstance(exception, WorkflowTaskException) or (
+            isinstance(exception, ApiError) and exception.error_code == "task_error"
+        ):
             task_line_number = exception.line_number
             task_line_contents = exception.error_line
             task_trace = exception.task_trace
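For readers skimming the isinstance check above: only task-level exceptions carry line and trace details, so any other exception leaves those columns as None. A self-contained sketch of that extraction; the exception class here is a hypothetical stand-in exposing the same attributes the diff reads:

    from typing import Optional


    class StubTaskException(Exception):
        # hypothetical stand-in with the attributes read in the diff above
        def __init__(self, message: str, line_number: int, error_line: str, task_trace: Optional[list]) -> None:
            super().__init__(message)
            self.line_number = line_number
            self.error_line = error_line
            self.task_trace = task_trace


    def task_error_fields(exception: Exception) -> dict:
        fields = {"task_line_number": None, "task_line_contents": None, "task_trace": None}
        if isinstance(exception, StubTaskException):
            fields["task_line_number"] = exception.line_number
            fields["task_line_contents"] = exception.error_line
            fields["task_trace"] = exception.task_trace
        return fields


    print(task_error_fields(StubTaskException("boom", 1, "hey", ["frame one"])))
    print(task_error_fields(ValueError("not a task-level failure")))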
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
index cabc67356..c8e6dd001 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
@@ -1,18 +1,15 @@
 from __future__ import annotations
+
 import copy
 import time
 from abc import abstractmethod
-from typing import Type, TypeVar
-from SpiffWorkflow.exceptions import WorkflowTaskException  # type: ignore
-from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from typing import Callable
-from typing import Optional
-from typing import Set
 from uuid import UUID

 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
+from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState

@@ -23,7 +20,7 @@ from spiffworkflow_backend.models.message_instance_correlation import (
     MessageInstanceCorrelationRuleModel,
 )
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
-from spiffworkflow_backend.models.task_definition import TaskDefinitionModel  # noqa: F401
+from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.services.assertion_service import safe_assertion
 from spiffworkflow_backend.services.process_instance_lock_service import (
     ProcessInstanceLockService,
@@ -32,8 +29,6 @@ from spiffworkflow_backend.services.task_service import StartAndEndTimes
 from spiffworkflow_backend.services.task_service import TaskService


-
-
 class WorkflowExecutionServiceError(WorkflowTaskException):  # type: ignore
     @classmethod
     def from_workflow_task_exception(
@@ -107,17 +102,17 @@ class TaskModelSavingDelegate(EngineStepDelegate):
         serializer: BpmnWorkflowSerializer,
         process_instance: ProcessInstanceModel,
         bpmn_definition_to_task_definitions_mappings: dict,
-        secondary_engine_step_delegate: Optional[EngineStepDelegate] = None,
+        secondary_engine_step_delegate: EngineStepDelegate | None = None,
     ) -> None:
         self.secondary_engine_step_delegate = secondary_engine_step_delegate
         self.process_instance = process_instance
         self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings
         self.serializer = serializer

-        self.current_task_start_in_seconds: Optional[float] = None
+        self.current_task_start_in_seconds: float | None = None

-        self.last_completed_spiff_task: Optional[SpiffTask] = None
-        self.spiff_tasks_to_process: Set[UUID] = set()
+        self.last_completed_spiff_task: SpiffTask | None = None
+        self.spiff_tasks_to_process: set[UUID] = set()
         self.spiff_task_timestamps: dict[UUID, StartAndEndTimes] = {}

         self.task_service = TaskService(
@@ -364,9 +359,14 @@ class WorkflowExecutionService:
             self.process_bpmn_messages()
             self.queue_waiting_receive_messages()
         except WorkflowTaskException as wte:
-            TaskService.add_event_to_process_instance(self.process_instance_model, ProcessInstanceEventType.task_failed.value, exception=wte, task_guid=str(wte.task.id))
+            TaskService.add_event_to_process_instance(
+                self.process_instance_model,
+                ProcessInstanceEventType.task_failed.value,
+                exception=wte,
+                task_guid=str(wte.task.id),
+            )
             self.execution_strategy.on_exception(self.bpmn_process_instance)
-            raise WorkflowExecutionServiceError.from_workflow_task_exception(wte)
+            raise WorkflowExecutionServiceError.from_workflow_task_exception(wte) from wte
         except SpiffWorkflowException as swe:
             self.execution_strategy.on_exception(self.bpmn_process_instance)
             raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py
index adbd22408..cff9c0b17 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py
@@ -36,7 +36,7 @@ class TestErrorHandlingService(BaseTest):
         pip = ProcessInstanceProcessor(process_instance)
         with pytest.raises(ApiError) as e:
             pip.do_engine_steps(save=True)
-        ErrorHandlingService().handle_error(pip, e.value)
+        ErrorHandlingService().handle_error(process_instance, e.value)
         return process_instance

     def test_handle_error_suspends_or_faults_process(
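One note on the "raise ... from wte" change in the execution service above: chaining with "from" marks the original WorkflowTaskException as the direct cause (__cause__), so tracebacks read "The above exception was the direct cause of the following exception" instead of the misleading "During handling of the above exception, another exception occurred". A small, self-contained illustration with generic exception names:

    class WrappedError(Exception):
        pass


    def reraise(original: Exception) -> None:
        # explicit chaining: the original failure stays attached as __cause__
        raise WrappedError(str(original)) from original


    try:
        try:
            raise KeyError("task data missing")
        except KeyError as err:
            reraise(err)
    except WrappedError as wrapped:
        assert isinstance(wrapped.__cause__, KeyError)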
From 92c21f2c11405d675beb73db55a41e4dee0db2d4 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Thu, 20 Apr 2023 13:32:45 -0400
Subject: [PATCH 09/11] fixed tests and added test to ensure we are saving
 error events to the db on error w/ burnettk

---
 .../integration/test_process_api.py          |  4 +--
 .../scripts/test_refresh_permissions.py      |  7 ++---
 .../unit/test_error_handling_service.py      |  4 +--
 .../unit/test_process_instance_processor.py  | 26 ++++++++++++++++---
 .../unit/test_restricted_script_engine.py    | 20 +++++---------
 5 files changed, 34 insertions(+), 27 deletions(-)

diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
index c5623f47b..6eb6c74e5 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
@@ -2019,7 +2019,7 @@ class TestProcessApi(BaseTest):
         assert response.status_code == 400

         api_error = json.loads(response.get_data(as_text=True))
-        assert api_error["error_code"] == "task_error"
+        assert api_error["error_code"] == "unexpected_workflow_exception"
         assert 'TypeError:can only concatenate str (not "int") to str' in api_error["message"]

         process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
@@ -2099,7 +2099,7 @@ class TestProcessApi(BaseTest):
         processor = ProcessInstanceProcessor(process_instance)
         spiff_task = processor.get_task_by_bpmn_identifier("script_task_two", processor.bpmn_process_instance)
         assert spiff_task is not None
-        assert spiff_task.state == TaskState.WAITING
+        assert spiff_task.state == TaskState.ERROR
         assert spiff_task.data == {"my_var": "THE VAR"}

     def test_process_model_file_create(
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py
index 20176dd84..225e870f5 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py
@@ -5,22 +5,19 @@ from flask.testing import FlaskClient
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
+from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError


 class TestRefreshPermissions(BaseTest):
-    """TestRefreshPermissions."""
-
     def test_refresh_permissions_requires_elevated_permission(
         self,
         app: Flask,
         client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
-        """Test_refresh_permissions_requires_elevated_permission."""
         basic_user = self.find_or_create_user("basic_user")
         privileged_user = self.find_or_create_user("privileged_user")
         self.add_permissions_to_user(
@@ -38,7 +35,7 @@ class TestRefreshPermissions(BaseTest):

         processor = ProcessInstanceProcessor(process_instance)

-        with pytest.raises(ApiError) as exception:
+        with pytest.raises(WorkflowExecutionServiceError) as exception:
             processor.do_engine_steps(save=True)
         assert "ScriptUnauthorizedForUserError" in str(exception)
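Aside on the assertion kept above: with pytest.raises, "exception" is an ExceptionInfo wrapper rather than the exception itself. On recent pytest its string form includes the wrapped exception, so str(exception) happens to match, while str(exception.value), used in the other updated tests, targets the exception object directly. A tiny sketch of the difference, assuming pytest 5 or newer:

    import pytest


    def test_exception_info_string_forms() -> None:
        with pytest.raises(ValueError) as exception:
            raise ValueError("ScriptUnauthorizedForUserError happened")
        # ExceptionInfo's repr contains the exception; .value IS the exception
        assert "ScriptUnauthorizedForUserError" in str(exception)
        assert "ScriptUnauthorizedForUserError" in str(exception.value)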
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py
index cff9c0b17..33eb86cc0 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py
@@ -5,7 +5,6 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

 from spiffworkflow_backend import db
-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.message_instance import MessageInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
@@ -19,6 +18,7 @@ from spiffworkflow_backend.services.process_instance_service import (
     ProcessInstanceService,
 )
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError


 class TestErrorHandlingService(BaseTest):
@@ -34,7 +34,7 @@ class TestErrorHandlingService(BaseTest):
             process_model.id, user
         )
         pip = ProcessInstanceProcessor(process_instance)
-        with pytest.raises(ApiError) as e:
+        with pytest.raises(WorkflowExecutionServiceError) as e:
             pip.do_engine_steps(save=True)
         ErrorHandlingService().handle_error(process_instance, e.value)
         return process_instance
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
index d0d4eb730..0dc65e9d0 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
@@ -10,12 +10,12 @@ from SpiffWorkflow.task import TaskState
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.group import GroupModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
 from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
 from spiffworkflow_backend.models.user import UserModel
@@ -29,6 +29,7 @@ from spiffworkflow_backend.services.process_instance_processor import (
 from spiffworkflow_backend.services.process_instance_service import (
     ProcessInstanceService,
 )
+from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError


 class TestProcessInstanceProcessor(BaseTest):
@@ -713,7 +714,7 @@ class TestProcessInstanceProcessor(BaseTest):
         spiff_task = processor.get_task_by_guid(human_task_three.task_id)
         ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_three)

-    def test_task_data_is_set_even_if_process_instance_errors(
+    def test_task_data_is_set_even_if_process_instance_errors_and_creates_task_failed_event(
         self,
         app: Flask,
         client: FlaskClient,
@@ -731,7 +732,7 @@ class TestProcessInstanceProcessor(BaseTest):
         )
         processor = ProcessInstanceProcessor(process_instance)

-        with pytest.raises(ApiError):
+        with pytest.raises(WorkflowExecutionServiceError):
             processor.do_engine_steps(save=True)

         process_instance_final = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
@@ -741,5 +742,22 @@ class TestProcessInstanceProcessor(BaseTest):
             "script_task_two", processor_final.bpmn_process_instance
         )
         assert spiff_task is not None
-        assert spiff_task.state == TaskState.WAITING
+        assert spiff_task.state == TaskState.ERROR
         assert spiff_task.data == {"my_var": "THE VAR"}
+
+        process_instance_events = process_instance.process_instance_events
+        assert len(process_instance_events) == 4
+        error_events = [
+            e for e in process_instance_events if e.event_type == ProcessInstanceEventType.task_failed.value
+        ]
+        assert len(error_events) == 1
+        error_event = error_events[0]
+        assert error_event.task_guid is not None
+        process_instance_error_details = error_event.error_details
+        assert len(process_instance_error_details) == 1
+        error_detail = process_instance_error_details[0]
+        assert error_detail.message == "NameError:name 'hey' is not defined. Did you mean 'my_var'?"
+        assert error_detail.task_offset is None
+        assert error_detail.task_line_number == 1
+        assert error_detail.task_line_contents == "hey"
+        assert error_detail.task_trace is not None
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py
index 330d115f0..d8b90a8af 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py
@@ -5,24 +5,21 @@ from flask.testing import FlaskClient
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

-from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
+from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError


-class TestOpenFile(BaseTest):
-    """TestVariousBpmnConstructs."""
-
-    def test_dot_notation(
+class TestRestrictedScriptEngine(BaseTest):
+    def test_dot_notation_with_open_file(
         self,
         app: Flask,
         client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
         with_super_admin_user: UserModel,
     ) -> None:
-        """Test_form_data_conversion_to_dot_dict."""
         self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
         process_model = load_test_spec(
             "test_group/dangerous",
@@ -34,22 +31,17 @@
         process_instance = self.create_process_instance_from_process_model(process_model)
         processor = ProcessInstanceProcessor(process_instance)

-        with pytest.raises(ApiError) as exception:
+        with pytest.raises(WorkflowExecutionServiceError) as exception:
             processor.do_engine_steps(save=True)
         assert "name 'open' is not defined" in str(exception.value)

-
-class TestImportModule(BaseTest):
-    """TestVariousBpmnConstructs."""
-
-    def test_dot_notation(
+    def test_dot_notation_with_import_module(
         self,
         app: Flask,
         client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
         with_super_admin_user: UserModel,
     ) -> None:
-        """Test_form_data_conversion_to_dot_dict."""
         self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
         process_model = load_test_spec(
             "test_group/dangerous",
@@ -61,6 +53,6 @@
         process_instance = self.create_process_instance_from_process_model(process_model)
         processor = ProcessInstanceProcessor(process_instance)

-        with pytest.raises(ApiError) as exception:
+        with pytest.raises(WorkflowExecutionServiceError) as exception:
             processor.do_engine_steps(save=True)
         assert "Import not allowed: os" in str(exception.value)
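The new assertions in test_process_instance_processor above walk this data shape: a process instance has a list of events, a failed task contributes one task_failed event, and that event owns a single error-detail record. A toy, self-contained stand-in for that walk (plain objects, not the real SQLAlchemy models):

    from types import SimpleNamespace

    TASK_FAILED = "task_failed"

    detail = SimpleNamespace(
        message="NameError:name 'hey' is not defined. Did you mean 'my_var'?",
        task_line_number=1,
        task_line_contents="hey",
    )
    events = [
        SimpleNamespace(event_type="task_completed", error_details=[]),
        SimpleNamespace(event_type=TASK_FAILED, task_guid="abc-123", error_details=[detail]),
    ]

    error_events = [e for e in events if e.event_type == TASK_FAILED]
    assert len(error_events) == 1
    assert error_events[0].error_details[0].task_line_contents == "hey"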
From 2dc3b0a76ee136c174a83424d9c10dd79672b2a2 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Thu, 20 Apr 2023 14:49:34 -0400
Subject: [PATCH 10/11] updated SpiffWorkflow w/ burnettk

---
 spiffworkflow-backend/poetry.lock    | 6 +++---
 spiffworkflow-backend/pyproject.toml | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock
index e23c398ec..f600d2d3b 100644
--- a/spiffworkflow-backend/poetry.lock
+++ b/spiffworkflow-backend/poetry.lock
@@ -3300,8 +3300,8 @@ lxml = "*"
 [package.source]
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
-reference = "feature/new-task-states"
-resolved_reference = "a04fdd3113cfce1f1360c4c7e998ec3f66769a11"
+reference = "main"
+resolved_reference = "73886584b17c7d11a9713d0c4526ed41e411fc45"

 [[package]]
 name = "sqlalchemy"
@@ -3934,4 +3934,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<3.12"
-content-hash = "4ae3c31115f378193f33eb9b27d376fcf0f7c20fba54ed5c921f37a4778b8d09"
+content-hash = "994c36ab39238500b4fd05bc1ccdd2d729dd5f66749ab77b1028371147bdf753"
diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml
index bfa0ad071..d4288f5b7 100644
--- a/spiffworkflow-backend/pyproject.toml
+++ b/spiffworkflow-backend/pyproject.toml
@@ -27,7 +27,7 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/new-task-states"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
 # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
 # SpiffWorkflow = {develop = true, path = "../../SpiffWorkflow" }
 sentry-sdk = "^1.10"

From e7f0bbbce63db0d80f37c0cdd3da20cbd0a0e80e Mon Sep 17 00:00:00 2001
From: jasquat
Date: Thu, 20 Apr 2023 15:00:12 -0400
Subject: [PATCH 11/11] fixed sqlalchemy warning w/ burnettk

---
 .../spiffworkflow_backend/models/process_instance_event.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py
index 684c9e086..3a883e5c3 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py
@@ -39,7 +39,9 @@ class ProcessInstanceEventModel(SpiffworkflowBaseDBModel):

     user_id = db.Column(ForeignKey(UserModel.id), nullable=True, index=True)  # type: ignore

-    error_details = relationship("ProcessInstanceErrorDetailModel", cascade="delete")  # type: ignore
+    error_details = relationship(
+        "ProcessInstanceErrorDetailModel", back_populates="process_instance_event", cascade="delete"
+    )  # type: ignore

     @validates("event_type")
     def validate_event_type(self, key: str, value: Any) -> Any:
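The final patch addresses the SQLAlchemy warning by declaring both directions of the event/error-detail relationship explicitly with back_populates. A generic, runnable illustration of that pattern (toy models rather than the project's real ones; assumes SQLAlchemy 1.4 or newer):

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()


    class Event(Base):
        __tablename__ = "event"
        id = Column(Integer, primary_key=True)
        # one side names the reverse attribute via back_populates ...
        error_details = relationship("ErrorDetail", back_populates="event", cascade="delete")


    class ErrorDetail(Base):
        __tablename__ = "error_detail"
        id = Column(Integer, primary_key=True)
        event_id = Column(ForeignKey("event.id"), nullable=False)
        message = Column(String(1024), nullable=False)
        # ... and the other side points back, so SQLAlchemy pairs them explicitly
        event = relationship("Event", back_populates="error_details")


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(Event(error_details=[ErrorDetail(message="boom")]))
        session.commit()
        assert session.get(ErrorDetail, 1).event is session.get(Event, 1)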