backend tests are passing w/ burnettk

jasquat 2023-04-24 12:41:33 -04:00
parent ecade1631a
commit afc7391806
9 changed files with 181 additions and 134 deletions

View File

@@ -1,7 +1,8 @@
"""API Error functionality."""
from __future__ import annotations
from spiffworkflow_backend.services.task_service import TaskService
from typing import Optional
from spiffworkflow_backend.models.task import TaskModelException, TaskModel # noqa: F401
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
import json
from dataclasses import dataclass
@@ -27,6 +28,7 @@ from spiffworkflow_backend.services.authentication_service import NotAuthorizedE
from spiffworkflow_backend.services.authentication_service import TokenInvalidError
from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
from spiffworkflow_backend.services.task_service import TaskModelException
api_error_blueprint = Blueprint("api_error_blueprint", __name__)
@@ -116,17 +118,20 @@ class ApiError(Exception):
task_definition = task_model.task_definition
instance.task_id = task_definition.bpmn_identifier
instance.task_name = task_definition.bpmn_name or ""
# TODO: find a way to get a file from task model
# instance.file_name = task.workflow.spec.file or ""
instance.line_number = line_number
instance.offset = offset
instance.error_type = error_type
instance.error_line = error_line
if task_trace:
instance.task_trace = task_trace
# TODO: needs implementation
# else:
# instance.task_trace = TaskModelException.get_task_trace(task)
else:
instance.task_trace = TaskModelException.get_task_trace(task_model)
try:
spec_reference = TaskService.get_spec_reference_from_bpmn_process(task_model.bpmn_process)
instance.file_name = spec_reference.file_name
except Exception:
pass
# Ensure that there is nothing in the json data that can't be serialized.
instance.task_data = ApiError.remove_unserializeable_from_dict(task_model.get_data())
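For context, a minimal sketch of what a helper like remove_unserializeable_from_dict plausibly does, assuming a simple top-level filter (an illustration, not the project's actual implementation):

import json
from typing import Any

def remove_unserializeable_from_dict(data: dict[str, Any]) -> dict[str, Any]:
    """Drop top-level values that json.dumps cannot handle."""
    cleaned: dict[str, Any] = {}
    for key, value in data.items():
        try:
            json.dumps(value)
        except TypeError:
            continue  # skip values such as open file handles or raw objects
        cleaned[key] = value
    return cleaned

print(remove_unserializeable_from_dict({"ok": 1, "bad": object()}))  # {'ok': 1}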

View File

@@ -95,59 +95,6 @@ class TaskModel(SpiffworkflowBaseDBModel):
return JsonDataModel.find_data_dict_by_hash(self.json_data_hash)
class TaskModelException(Exception):
"""Copied from SpiffWorkflow.exceptions.WorkflowTaskException.
Reimplements the exception from SpiffWorkflow to not require a spiff_task.
"""
def __init__(self, error_msg: str, task_model: TaskModel, exception: Optional[Exception]=None,
line_number: Optional[int]=None, offset: Optional[int]=None, error_line: Optional[str]=None):
self.task_model = task_model
self.line_number = line_number
self.offset = offset
self.error_line = error_line
self.notes: list[str] = []
if exception:
self.error_type = exception.__class__.__name__
else:
self.error_type = "unknown"
if isinstance(exception, SyntaxError) and not line_number:
self.line_number = exception.lineno
self.offset = exception.offset
elif isinstance(exception, NameError):
self.add_note(WorkflowException.did_you_mean_from_name_error(exception, list(task_model.get_data().keys())))
# If encountered in a sub-workflow, this traces back up the stack,
# so we can tell how we got to this particular task, no matter how
# deeply nested in sub-workflows it is. Takes the form of:
# task-description (file-name)
self.task_trace = self.get_task_trace(task_model)
def add_note(self, note: str) -> None:
self.notes.append(note)
def __str__(self) -> str:
return super().__str__() + ". " + ". ".join(self.notes)
# TODO: implement this with db
@classmethod
def get_task_trace(cls, _task_model: TaskModel) -> list[str]:
return []
# task_bpmn_name = task_model.task_definition.bpmn_name
#
# task_trace = [f"{task.task_spec.description} ({task.workflow.spec.file})"]
# workflow = task.workflow
# while workflow != workflow.outer_workflow:
# caller = workflow.name
# workflow = workflow.outer_workflow
# task_trace.append(f"{workflow.spec.task_specs[caller].description} ({workflow.spec.file})")
# return task_trace
class Task:
"""Task."""

View File

@@ -1,5 +1,6 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
import os
import uuid
from sys import exc_info
@@ -43,7 +44,7 @@ from spiffworkflow_backend.models.process_instance import (
)
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.task import TaskModelException, TaskModel # noqa: F401
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
_find_principal_or_raise,
@@ -67,7 +68,7 @@ from spiffworkflow_backend.services.process_instance_service import (
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskService
from spiffworkflow_backend.services.task_service import TaskModelException, TaskService
class TaskDataSelectOption(TypedDict):
@@ -218,7 +219,7 @@ def task_data_update(
)
if json_data_dict is not None:
TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid
)
try:
@@ -414,9 +415,11 @@ def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[st
last_task = None
while last_task != spiff_task:
task = ProcessInstanceService.spiff_task_to_api_task(processor, processor.next_task())
instructions = _render_instructions_for_end_user(task_model)
extensions = TaskService.get_extensions_from_task_model(task_model)
instructions = _render_instructions_for_end_user(task_model, extensions)
if instructions and spiff_task.id not in reported_ids:
reported_ids.append(spiff_task.id)
task.properties = extensions
yield f"data: {current_app.json.dumps(task)} \n\n"
last_task = spiff_task
try:
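The data: ... framing in the yield above is the server-sent events wire format. A standalone sketch of a generator producing it, with plain json standing in for Flask's current_app.json (the payload dict is invented for illustration):

import json
from typing import Generator

def sse_events(payloads: list[dict]) -> Generator[str, None, None]:
    # Each SSE message is a "data:" line terminated by a blank line.
    for payload in payloads:
        yield f"data: {json.dumps(payload)}\n\n"

for chunk in sse_events([{"id": "abc", "properties": {"instructionsForEndUser": "hi"}}]):
    print(chunk, end="")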
@@ -696,7 +699,7 @@ def _render_jinja_template(unprocessed_template: str, task_model: TaskModel) ->
jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
try:
template = jinja_environment.from_string(unprocessed_template)
return template.render(**(task_model.data or {}))
return template.render(**(task_model.get_data()))
except jinja2.exceptions.TemplateError as template_error:
wfe = TaskModelException(str(template_error), task_model=task_model, exception=template_error)
if isinstance(template_error, TemplateSyntaxError):
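For reference, a self-contained sketch of the render-and-wrap pattern this hunk changes. The task data dict is invented for illustration, and the real code raises TaskModelException rather than the RuntimeError used here:

import jinja2

task_data = {"name": "World"}  # stands in for task_model.get_data()

jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
try:
    template = jinja_environment.from_string("Hello {{ name }}!")
    print(template.render(**task_data))  # Hello World!
except jinja2.exceptions.TemplateError as template_error:
    # TemplateSyntaxError carries a lineno that the API error payload surfaces.
    line_number = getattr(template_error, "lineno", None)
    raise RuntimeError(f"template failed at line {line_number}") from template_error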

View File

@@ -2,6 +2,7 @@
# TODO: clean up this service for a clear distinction between it and the process_instance_service
# where this points to the pi service
import _strptime # type: ignore
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
import copy
import decimal
import json
@@ -1196,7 +1197,7 @@ class ProcessInstanceProcessor:
db.session.bulk_save_objects(new_task_models.values())
TaskService.insert_or_update_json_data_records(new_json_data_dicts)
TaskService.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id)
ProcessInstanceTmpService.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id)
self.save()
# Saving the workflow seems to reset the status
self.suspend()
@@ -1209,7 +1210,7 @@
def reset_process(cls, process_instance: ProcessInstanceModel, to_task_guid: str) -> None:
"""Reset a process to an earlier state."""
# raise Exception("This feature to reset a process instance to a given task is currently unavailable")
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid
)
@@ -1738,7 +1739,7 @@ class ProcessInstanceProcessor:
TaskService.update_json_data_dicts_using_list(json_data_dict_list, json_data_dict_mapping)
TaskService.insert_or_update_json_data_records(json_data_dict_mapping)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model,
ProcessInstanceEventType.task_completed.value,
task_guid=task_model.guid,
@@ -1842,7 +1843,7 @@ class ProcessInstanceProcessor:
self.save()
self.process_instance_model.status = "terminated"
db.session.add(self.process_instance_model)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, ProcessInstanceEventType.process_instance_terminated.value
)
db.session.commit()
@@ -1851,7 +1852,7 @@ class ProcessInstanceProcessor:
"""Suspend."""
self.process_instance_model.status = ProcessInstanceStatus.suspended.value
db.session.add(self.process_instance_model)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, ProcessInstanceEventType.process_instance_suspended.value
)
db.session.commit()
@@ -1860,7 +1861,7 @@ class ProcessInstanceProcessor:
"""Resume."""
self.process_instance_model.status = ProcessInstanceStatus.waiting.value
db.session.add(self.process_instance_model)
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model, ProcessInstanceEventType.process_instance_resumed.value
)
db.session.commit()

View File

@@ -14,6 +14,7 @@ from spiffworkflow_backend.models.process_instance_queue import (
from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from spiffworkflow_backend.services.task_service import TaskService
from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError
@@ -103,7 +104,7 @@ class ProcessInstanceQueueService:
# these events are handled in the WorkflowExecutionService.
# that is, we don't need to add error_detail records here, etc.
if not isinstance(ex, WorkflowExecutionServiceError):
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.process_instance_error.value, exception=ex
)
db.session.commit()

View File

@@ -0,0 +1,76 @@
from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
import traceback
from spiffworkflow_backend.models.db import db
from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
from typing import Tuple
import time
from flask import g
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from typing import Optional
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
class ProcessInstanceTmpService:
"""Temporary service to hold methods that should eventually be moved to ProcessInstanceService.
These methods cannot live there due to circular import issues with the ProcessInstanceProcessor.
"""
# TODO: move to process_instance_service once we clean it and the processor up
@classmethod
def add_event_to_process_instance(
cls,
process_instance: ProcessInstanceModel,
event_type: str,
task_guid: Optional[str] = None,
user_id: Optional[int] = None,
exception: Optional[Exception] = None,
timestamp: Optional[float] = None,
add_to_db_session: Optional[bool] = True,
) -> Tuple[ProcessInstanceEventModel, Optional[ProcessInstanceErrorDetailModel]]:
if user_id is None and hasattr(g, "user") and g.user:
user_id = g.user.id
if timestamp is None:
timestamp = time.time()
process_instance_event = ProcessInstanceEventModel(
process_instance_id=process_instance.id, event_type=event_type, timestamp=timestamp, user_id=user_id
)
if task_guid:
process_instance_event.task_guid = task_guid
if add_to_db_session:
db.session.add(process_instance_event)
process_instance_error_detail = None
if exception is not None:
# truncate to avoid database errors on large values; a MySQL TEXT column holds at most 65K characters.
stacktrace = traceback.format_exc().split("\n")
message = str(exception)[0:1023]
task_line_number = None
task_line_contents = None
task_trace = None
task_offset = None
# match ApiError by its class-name string to avoid a circular import
if isinstance(exception, WorkflowTaskException) or (
exception.__class__.__name__ == 'ApiError' and exception.error_code == "task_error"
):
task_line_number = exception.line_number
task_line_contents = exception.error_line[0:255]
task_trace = exception.task_trace
task_offset = exception.offset
process_instance_error_detail = ProcessInstanceErrorDetailModel(
process_instance_event=process_instance_event,
message=message,
stacktrace=stacktrace,
task_line_number=task_line_number,
task_line_contents=task_line_contents,
task_trace=task_trace,
task_offset=task_offset,
)
if add_to_db_session:
db.session.add(process_instance_error_detail)
return (process_instance_event, process_instance_error_detail)
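A usage sketch for the relocated helper, mirroring the call sites elsewhere in this commit. run_engine_steps is a hypothetical stand-in for task execution, and the helper only adds rows to the session, so committing stays with the caller:

try:
    run_engine_steps(process_instance)  # hypothetical; stands in for executing tasks
except Exception as ex:
    event, error_detail = ProcessInstanceTmpService.add_event_to_process_instance(
        process_instance,
        ProcessInstanceEventType.process_instance_error.value,
        exception=ex,
    )
    db.session.commit()  # the helper does not commit on its own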

View File

@@ -1,7 +1,11 @@
import copy
import json
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
from typing import Union
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
import time
import traceback
from hashlib import sha256
from typing import Optional
from typing import Tuple
@@ -9,25 +13,22 @@ from typing import TypedDict
from uuid import UUID
from flask import current_app
from flask import g
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.task import TaskStateNames
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_error_detail import ProcessInstanceErrorDetailModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
@@ -41,6 +42,60 @@ class JsonDataDict(TypedDict):
data: dict
class TaskModelException(Exception):
"""Copied from SpiffWorkflow.exceptions.WorkflowTaskException.
Reimplements the exception from SpiffWorkflow to not require a spiff_task.
"""
def __init__(self, error_msg: str, task_model: TaskModel, exception: Optional[Exception]=None,
line_number: Optional[int]=None, offset: Optional[int]=None, error_line: Optional[str]=None):
self.task_model = task_model
self.line_number = line_number
self.offset = offset
self.error_line = error_line
self.notes: list[str] = []
if exception:
self.error_type = exception.__class__.__name__
else:
self.error_type = "unknown"
if isinstance(exception, SyntaxError) and not line_number:
self.line_number = exception.lineno
self.offset = exception.offset
elif isinstance(exception, NameError):
self.add_note(WorkflowException.did_you_mean_from_name_error(exception, list(task_model.get_data().keys())))
# If encountered in a sub-workflow, this traces back up the stack,
# so we can tell how we got to this particular task, no matter how
# deeply nested in sub-workflows it is. Takes the form of:
# task-description (file-name)
self.task_trace = self.get_task_trace(task_model)
def add_note(self, note: str) -> None:
self.notes.append(note)
def __str__(self) -> str:
return super().__str__() + ". " + ". ".join(self.notes)
@classmethod
def get_task_trace(cls, task_model: TaskModel) -> list[str]:
task_definition = task_model.task_definition
task_bpmn_name = TaskService.get_name_for_display(task_definition)
bpmn_process = task_model.bpmn_process
spec_reference = TaskService.get_spec_reference_from_bpmn_process(bpmn_process)
task_trace = [f"{task_bpmn_name} ({spec_reference.file_name})"]
while bpmn_process.guid is not None:
caller_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first()
bpmn_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first()
spec_reference = TaskService.get_spec_reference_from_bpmn_process(bpmn_process)
task_trace.append(f"{TaskService.get_name_for_display(caller_task_model.task_definition)} ({spec_reference.file_name})")
return task_trace
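The walk above stops at the top-level process, whose guid is None. A toy re-implementation over plain dataclasses, to show the parent-chain shape without the ORM (all names here are invented):

from dataclasses import dataclass
from typing import Optional

@dataclass
class Proc:  # toy stand-in for BpmnProcessModel
    guid: Optional[str]
    parent: Optional["Proc"]
    file_name: str

def trace(task_name: str, proc: Proc, callers: dict[str, str]) -> list[str]:
    lines = [f"{task_name} ({proc.file_name})"]
    while proc.guid is not None:  # guid is None only for the top-level process
        caller_name = callers[proc.guid]  # bpmn name of the calling task
        proc = proc.parent  # safe: the loop exits before parent can be None
        lines.append(f"{caller_name} ({proc.file_name})")
    return lines

top = Proc(guid=None, parent=None, file_name="main.bpmn")
sub = Proc(guid="call-1", parent=top, file_name="sub.bpmn")
print(trace("Bad Script Task", sub, {"call-1": "Call Sub Process"}))
# ['Bad Script Task (sub.bpmn)', 'Call Sub Process (main.bpmn)']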
class TaskService:
PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state"
@@ -161,7 +216,7 @@ class TaskService:
if task_model.state == "COMPLETED":
event_type = ProcessInstanceEventType.task_completed.value
timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time()
process_instance_event, _process_instance_error_detail = TaskService.add_event_to_process_instance(
process_instance_event, _process_instance_error_detail = ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance,
event_type,
task_guid=task_model.guid,
@@ -641,60 +696,20 @@ class TaskService:
)
return extensions
# TODO: move to process_instance_service once we clean it and the processor up
@classmethod
def add_event_to_process_instance(
cls,
process_instance: ProcessInstanceModel,
event_type: str,
task_guid: Optional[str] = None,
user_id: Optional[int] = None,
exception: Optional[Exception] = None,
timestamp: Optional[float] = None,
add_to_db_session: Optional[bool] = True,
) -> Tuple[ProcessInstanceEventModel, Optional[ProcessInstanceErrorDetailModel]]:
if user_id is None and hasattr(g, "user") and g.user:
user_id = g.user.id
if timestamp is None:
timestamp = time.time()
def get_spec_reference_from_bpmn_process(cls, bpmn_process: BpmnProcessModel) -> SpecReferenceCache:
"""Get the bpmn file for a given task model.
process_instance_event = ProcessInstanceEventModel(
process_instance_id=process_instance.id, event_type=event_type, timestamp=timestamp, user_id=user_id
)
if task_guid:
process_instance_event.task_guid = task_guid
if add_to_db_session:
db.session.add(process_instance_event)
process_instance_error_detail = None
if exception is not None:
# truncate to avoid database errors on large values. We observed that text in mysql is 65K.
stacktrace = traceback.format_exc().split("\n")
message = str(exception)[0:1023]
task_line_number = None
task_line_contents = None
task_trace = None
task_offset = None
if isinstance(exception, WorkflowTaskException) or (
isinstance(exception, ApiError) and exception.error_code == "task_error"
):
task_line_number = exception.line_number
task_line_contents = exception.error_line[0:255]
task_trace = exception.task_trace
task_offset = exception.offset
process_instance_error_detail = ProcessInstanceErrorDetailModel(
process_instance_event=process_instance_event,
message=message,
stacktrace=stacktrace,
task_line_number=task_line_number,
task_line_contents=task_line_contents,
task_trace=task_trace,
task_offset=task_offset,
This involves several queries, so avoid calling it in a tight loop.
"""
bpmn_process_definition = bpmn_process.bpmn_process_definition
spec_reference: Optional[SpecReferenceCache] = SpecReferenceCache.query.filter_by(identifier=bpmn_process_definition.bpmn_identifier, type='process').first()
if spec_reference is None:
raise SpecReferenceNotFoundError(
f"Could not find given process identifier in the cache: {bpmn_process_definition.bpmn_identifier}"
)
return spec_reference
if add_to_db_session:
db.session.add(process_instance_error_detail)
return (process_instance_event, process_instance_error_detail)
@classmethod
def get_name_for_display(cls, entity: Union[TaskDefinitionModel, BpmnProcessDefinitionModel]) -> str:
return entity.bpmn_name or entity.bpmn_identifier
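And a hedged sketch of consuming the new lookup, tolerating processes missing from the cache; task_model is assumed to be bound, and session handling is elided:

# Sketch: resolve the defining BPMN file for a task's process.
try:
    spec_reference = TaskService.get_spec_reference_from_bpmn_process(task_model.bpmn_process)
    file_name = spec_reference.file_name
except SpecReferenceNotFoundError:
    file_name = None  # best-effort metadata; some processes are not cached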

View File

@@ -1,4 +1,5 @@
from __future__ import annotations
from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
import copy
import time
@@ -395,7 +396,7 @@ class WorkflowExecutionService:
self.process_bpmn_messages()
self.queue_waiting_receive_messages()
except WorkflowTaskException as wte:
TaskService.add_event_to_process_instance(
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model,
ProcessInstanceEventType.task_failed.value,
exception=wte,

View File

@@ -95,9 +95,7 @@ class TestForGoodErrors(BaseTest):
assert response.json["error_type"] == "TemplateSyntaxError"
assert response.json["line_number"] == 3
assert response.json["error_line"] == "{{ x +=- 1}}"
# TODO: implement this
# assert response.json["file_name"] == "instructions_error.bpmn"
print(f"response.json: {response.json}")
assert response.json["file_name"] == "instructions_error.bpmn"
assert "instructions for end user" in response.json["message"]
assert "Jinja2" in response.json["message"]
assert "unexpected '='" in response.json["message"]