jasquat 2023-04-24 13:45:31 -04:00
parent f14d05c793
commit 0a30267cb4
No known key found for this signature in database
9 changed files with 82 additions and 62 deletions

View File

@@ -1,8 +1,5 @@
"""API Error functionality."""
from __future__ import annotations
from spiffworkflow_backend.services.task_service import TaskService
from typing import Optional
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
import json
from dataclasses import dataclass
@@ -24,11 +21,13 @@ from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.specs.base import TaskSpec # type: ignore
from SpiffWorkflow.task import Task # type: ignore
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
from spiffworkflow_backend.services.authentication_service import TokenInvalidError
from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
from spiffworkflow_backend.services.task_service import TaskModelException
from spiffworkflow_backend.services.task_service import TaskService
api_error_blueprint = Blueprint("api_error_blueprint", __name__)
@@ -40,18 +39,18 @@ class ApiError(Exception):
error_code: str
message: str
error_line: Optional[str] = ""
error_type: Optional[str] = ""
file_name: Optional[str] = ""
line_number: Optional[int] = 0
offset: Optional[int] = 0
sentry_link: Optional[str] = None
status_code: Optional[int] = 400
tag: Optional[str] = ""
task_data: Optional[dict | str] = field(default_factory=dict)
task_id: Optional[str] = ""
task_name: Optional[str] = ""
task_trace: Optional[list] = field(default_factory=list)
error_line: str | None = ""
error_type: str | None = ""
file_name: str | None = ""
line_number: int | None = 0
offset: int | None = 0
sentry_link: str | None = None
status_code: int | None = 400
tag: str | None = ""
task_data: dict | str | None = field(default_factory=dict)
task_id: str | None = ""
task_name: str | None = ""
task_trace: list | None = field(default_factory=list)
def __str__(self) -> str:
"""Instructions to print instance as a string."""
@@ -106,12 +105,12 @@ class ApiError(Exception):
error_code: str,
message: str,
task_model: TaskModel,
status_code: Optional[int] = 400,
line_number: Optional[int] = 0,
offset: Optional[int] = 0,
error_type: Optional[str] = "",
error_line: Optional[str] = "",
task_trace: Optional[list] = None,
status_code: int | None = 400,
line_number: int | None = 0,
offset: int | None = 0,
error_type: str | None = "",
error_line: str | None = "",
task_trace: list | None = None,
) -> ApiError:
"""Constructs an API Error with details pulled from the current task model."""
instance = cls(error_code, message, status_code=status_code)
@@ -130,8 +129,8 @@
try:
spec_reference = TaskService.get_spec_reference_from_bpmn_process(task_model.bpmn_process)
instance.file_name = spec_reference.file_name
except Exception:
pass
except Exception as exception:
current_app.logger.error(exception)
# Assure that there is nothing in the json data that can't be serialized.
instance.task_data = ApiError.remove_unserializeable_from_dict(task_model.get_data())
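
Side note on the hunks above: the ApiError dataclass fields move from typing.Optional[...] to PEP 604 "X | None" unions, and the bare "except Exception: pass" around the spec-reference lookup now logs the error via current_app.logger. Below is a minimal sketch of the annotation style using an illustrative stand-in class rather than the real ApiError; because the module keeps "from __future__ import annotations", the union annotations are stored as strings and never evaluated, so this also works on interpreters older than 3.10.

from __future__ import annotations

from dataclasses import dataclass
from dataclasses import field


@dataclass
class ExampleError(Exception):
    # Illustrative stand-in mirroring the ApiError field style above:
    # required fields first, then optional fields as PEP 604 unions.
    error_code: str
    message: str
    status_code: int | None = 400
    task_data: dict | str | None = field(default_factory=dict)
    task_trace: list | None = field(default_factory=list)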

View File

@@ -1,6 +1,5 @@
"""Task."""
import enum
from SpiffWorkflow.exceptions import WorkflowException # type: ignore
from dataclasses import dataclass
from typing import Any
from typing import Optional
@@ -96,7 +95,6 @@ class TaskModel(SpiffworkflowBaseDBModel):
return JsonDataModel.find_data_dict_by_hash(self.json_data_hash)
class Task:
"""Task."""

View File

@@ -1,6 +1,5 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
import os
import uuid
from sys import exc_info
@@ -66,9 +65,11 @@ from spiffworkflow_backend.services.process_instance_queue_service import (
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskModelException, TaskService
from spiffworkflow_backend.services.task_service import TaskModelException
from spiffworkflow_backend.services.task_service import TaskService
class TaskDataSelectOption(TypedDict):

View File

@@ -2,7 +2,6 @@
# TODO: clean up this service for a clear distinction between it and the process_instance_service
# where this points to the pi service
import _strptime # type: ignore
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
import copy
import decimal
import json
@@ -97,6 +96,7 @@ from spiffworkflow_backend.services.element_units_service import (
)
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -1197,7 +1197,9 @@ class ProcessInstanceProcessor:
db.session.bulk_save_objects(new_task_models.values())
TaskService.insert_or_update_json_data_records(new_json_data_dicts)
ProcessInstanceTmpService.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id)
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, event_type, task_guid=task_id
)
self.save()
# Saving the workflow seems to reset the status
self.suspend()

View File

@@ -15,7 +15,6 @@ from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.task_service import TaskService
from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError

View File

@@ -1,16 +1,18 @@
from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
import traceback
from spiffworkflow_backend.models.db import db
from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
from typing import Tuple
import time
from flask import g
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
import traceback
from typing import Optional
from typing import Tuple
from flask import g
from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
class ProcessInstanceTmpService():
class ProcessInstanceTmpService:
"""Temporary service to hold methods that should eventually be moved to ProcessInstanceService.
These methods cannot live there due to circular import issues with the ProcessInstanceProcessor.
@@ -52,14 +54,15 @@ class ProcessInstanceTmpService():
task_line_contents = None
task_trace = None
task_offset = None
# check for the class name string for ApiError to avoid circular imports
if isinstance(exception, WorkflowTaskException) or (
exception.__class__.__name__ == 'ApiError' and exception.error_code == "task_error"
exception.__class__.__name__ == "ApiError" and exception.error_code == "task_error" # type: ignore
):
task_line_number = exception.line_number
task_line_contents = exception.error_line[0:255]
task_trace = exception.task_trace
task_offset = exception.offset
task_line_number = exception.line_number # type: ignore
task_line_contents = exception.error_line[0:255] # type: ignore
task_trace = exception.task_trace # type: ignore
task_offset = exception.offset # type: ignore
process_instance_error_detail = ProcessInstanceErrorDetailModel(
process_instance_event=process_instance_event,
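
As the comment added above notes, this service checks the exception's class name instead of importing ApiError, which would drag the API layer into the service and create a circular import; the "# type: ignore" markers are needed because mypy cannot see those attributes on a plain Exception. A hedged sketch of that duck-typed check as a standalone helper (the helper name is illustrative, not part of the real service):

def looks_like_task_api_error(exception: Exception) -> bool:
    # Compare the class name rather than importing ApiError, and use getattr
    # so exceptions without an error_code attribute are handled safely.
    return (
        exception.__class__.__name__ == "ApiError"
        and getattr(exception, "error_code", None) == "task_error"
    )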

View File

@@ -1,20 +1,17 @@
import copy
import json
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from typing import Union
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
import time
from hashlib import sha256
from typing import Optional
from typing import Tuple
from typing import TypedDict
from typing import Union
from uuid import UUID
from flask import current_app
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.exceptions import WorkflowException # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.task import TaskStateNames
@@ -23,13 +20,17 @@ from sqlalchemy.dialects.postgresql import insert as postgres_insert
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
class StartAndEndTimes(TypedDict):
@@ -48,9 +49,15 @@ class TaskModelException(Exception):
Reimplements the exception from SpiffWorkflow to not require a spiff_task.
"""
def __init__(self, error_msg: str, task_model: TaskModel, exception: Optional[Exception]=None,
line_number: Optional[int]=None, offset: Optional[int]=None, error_line: Optional[str]=None):
def __init__(
self,
error_msg: str,
task_model: TaskModel,
exception: Optional[Exception] = None,
line_number: Optional[int] = None,
offset: Optional[int] = None,
error_line: Optional[str] = None,
):
self.task_model = task_model
self.line_number = line_number
self.offset = offset
@@ -66,7 +73,9 @@ class TaskModelException(Exception):
self.line_number = exception.lineno
self.offset = exception.offset
elif isinstance(exception, NameError):
self.add_note(WorkflowException.did_you_mean_from_name_error(exception, list(task_model.get_data().keys())))
self.add_note(
WorkflowException.did_you_mean_from_name_error(exception, list(task_model.get_data().keys()))
)
# If encountered in a sub-workflow, this traces back up the stack,
# so we can tell how we got to this particular task, no matter how
@@ -78,6 +87,7 @@ class TaskModelException(Exception):
self.notes.append(note)
def __str__(self) -> str:
"""Add notes to the error message."""
return super().__str__() + ". " + ". ".join(self.notes)
@classmethod
@@ -92,7 +102,9 @@ class TaskModelException(Exception):
caller_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first()
bpmn_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first()
spec_reference = TaskService.get_spec_reference_from_bpmn_process(bpmn_process)
task_trace.append(f"{TaskService.get_name_for_display(caller_task_model.task_definition)} ({spec_reference.file_name})")
task_trace.append(
f"{TaskService.get_name_for_display(caller_task_model.task_definition)} ({spec_reference.file_name})"
)
return task_trace
@@ -216,12 +228,14 @@ class TaskService:
if task_model.state == "COMPLETED":
event_type = ProcessInstanceEventType.task_completed.value
timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time()
process_instance_event, _process_instance_error_detail = ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance,
event_type,
task_guid=task_model.guid,
timestamp=timestamp,
add_to_db_session=False,
process_instance_event, _process_instance_error_detail = (
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance,
event_type,
task_guid=task_model.guid,
timestamp=timestamp,
add_to_db_session=False,
)
)
self.process_instance_events[task_model.guid] = process_instance_event
@@ -703,7 +717,9 @@ class TaskService:
This involves several queries so avoid calling in a tight loop.
"""
bpmn_process_definition = bpmn_process.bpmn_process_definition
spec_reference: Optional[SpecReferenceCache] = SpecReferenceCache.query.filter_by(identifier=bpmn_process_definition.bpmn_identifier, type='process').first()
spec_reference: Optional[SpecReferenceCache] = SpecReferenceCache.query.filter_by(
identifier=bpmn_process_definition.bpmn_identifier, type="process"
).first()
if spec_reference is None:
raise SpecReferenceNotFoundError(
f"Could not find given process identifier in the cache: {bpmn_process_definition.bpmn_identifier}"

View File

@@ -1,5 +1,4 @@
from __future__ import annotations
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
import copy
import time
@@ -26,6 +25,7 @@ from spiffworkflow_backend.services.assertion_service import safe_assertion
from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.task_service import StartAndEndTimes
from spiffworkflow_backend.services.task_service import TaskService

View File

@@ -253,7 +253,9 @@ class TestProcessInstanceProcessor(BaseTest):
processor = ProcessInstanceProcessor(process_instance)
# this task will be found within subprocesses
spiff_task = processor.__class__.get_task_by_bpmn_identifier("level_3_script_task", processor.bpmn_process_instance)
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
"level_3_script_task", processor.bpmn_process_instance
)
assert spiff_task is not None
assert spiff_task.state == TaskState.COMPLETED