added event logs for skipping and manually editing a task

jasquat 2023-03-17 13:51:56 -04:00
parent 88df3bd5c3
commit 3782864dc9
8 changed files with 63 additions and 116 deletions

View File

@@ -1,5 +1,3 @@
-from __future__ import with_statement
 import logging
 from logging.config import fileConfig

View File

@@ -1,13 +1,14 @@
 from __future__ import annotations
-from spiffworkflow_backend.models.user import UserModel
-from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
 from typing import Any
-from sqlalchemy.orm import validates
 from sqlalchemy import ForeignKey
+from sqlalchemy.orm import validates
+from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.user import UserModel

 # event types take the form [SUBJECT]_[PAST_TENSE_VERB] since subject is not always the same.
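For orientation, the ProcessInstanceEventType members referenced across this commit (task_skipped, task_executed_manually, task_data_edited, the pre-existing task_completed, and the process-instance lifecycle events) imply an enum along the lines of the sketch below. The full member list is not visible in this diff, so the sketch is an approximation rather than the model's actual definition; plain enum.Enum stands in for SpiffEnum to keep it self-contained.

    import enum

    # Approximate sketch -- members inferred from the event-type values used in this commit.
    class ProcessInstanceEventType(enum.Enum):
        process_instance_resumed = "process_instance_resumed"
        process_instance_suspended = "process_instance_suspended"
        process_instance_terminated = "process_instance_terminated"
        task_completed = "task_completed"
        task_data_edited = "task_data_edited"
        task_executed_manually = "task_executed_manually"
        task_skipped = "task_skipped"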

View File

@@ -17,24 +17,28 @@ from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
     ProcessEntityNotFoundError,
 )
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.json_data import JsonDataModel
 from spiffworkflow_backend.models.principal import PrincipalModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
 from spiffworkflow_backend.models.process_instance import (
     ProcessInstanceTaskDataCannotBeUpdatedError,
 )
+from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.process_instance_file_data import (
     ProcessInstanceFileDataModel,
 )
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
 from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
 from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
+from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.git_service import GitService
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.task_service import TaskService

 process_api_blueprint = Blueprint("process_api", __name__)
@@ -180,34 +184,33 @@ def task_data_update(
             f" It is currently: {process_instance.status}"
         )
-    process_instance_data = process_instance.process_instance_data
-    if process_instance_data is None:
+    task_model = TaskModel.query.filter_by(guid=task_id).first()
+    if task_model is None:
         raise ApiError(
-            error_code="process_instance_data_not_found",
-            message=f"Could not find task data related to process instance: {process_instance.id}",
+            error_code="update_task_data_error",
+            message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
         )
-    process_instance_data_dict = json.loads(process_instance_data.runtime_json)
     if "new_task_data" in body:
         new_task_data_str: str = body["new_task_data"]
         new_task_data_dict = json.loads(new_task_data_str)
-        if task_id in process_instance_data_dict["tasks"]:
-            process_instance_data_dict["tasks"][task_id]["data"] = new_task_data_dict
-            process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
-            db.session.add(process_instance_data)
-            try:
-                db.session.commit()
-            except Exception as e:
-                db.session.rollback()
-                raise ApiError(
-                    error_code="update_task_data_error",
-                    message=f"Could not update the Instance. Original error is {e}",
-                ) from e
-        else:
-            raise ApiError(
-                error_code="update_task_data_error",
-                message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
-            )
+        json_data_dict = TaskService.update_task_data_on_task_model(
+            task_model, new_task_data_dict, "json_data_hash"
+        )
+        if json_data_dict is not None:
+            json_data = JsonDataModel(**json_data_dict)
+            db.session.add(json_data)
+        ProcessInstanceProcessor.add_event_to_process_instance(
+            process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_id
+        )
+        try:
+            db.session.commit()
+        except Exception as e:
+            db.session.rollback()
+            raise ApiError(
+                error_code="update_task_data_error",
+                message=f"Could not update the Instance. Original error is {e}",
+            ) from e
     else:
         raise ApiError(
             error_code="update_task_data_error",
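The rewritten task_data_update body follows the pattern this commit applies everywhere: stage the data change, stage an audit event via ProcessInstanceProcessor.add_event_to_process_instance, then commit once. A condensed sketch of that flow using only names visible in the diff (error handling trimmed, so this is illustrative rather than the literal route handler):

    from spiffworkflow_backend.models.db import db
    from spiffworkflow_backend.models.json_data import JsonDataModel
    from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
    from spiffworkflow_backend.models.task import TaskModel
    from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
    from spiffworkflow_backend.services.task_service import TaskService

    def edit_task_data(process_instance, task_guid: str, new_task_data: dict) -> None:
        # Look up the task being edited; the real route raises ApiError when it is missing.
        task_model = TaskModel.query.filter_by(guid=task_guid).first()
        json_data_dict = TaskService.update_task_data_on_task_model(task_model, new_task_data, "json_data_hash")
        if json_data_dict is not None:
            db.session.add(JsonDataModel(**json_data_dict))  # persist the new task-data payload
        ProcessInstanceProcessor.add_event_to_process_instance(
            process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid
        )
        db.session.commit()  # one commit covers both the data change and the audit event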

View File

@@ -266,8 +266,9 @@ def process_instance_log_list(
     )
     logs = (
-        log_query.order_by(ProcessInstanceEventModel.timestamp.desc(),
-                           ProcessInstanceEventModel.id.desc())  # type: ignore
+        log_query.order_by(
+            ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc()  # type: ignore
+        )
         .outerjoin(UserModel, UserModel.id == ProcessInstanceEventModel.user_id)
         .add_columns(
             TaskModel.guid.label("spiff_task_guid"),  # type: ignore

View File

@@ -1,7 +1,5 @@
 """Process_instance_processor."""
 import _strptime  # type: ignore
-from flask import g
-from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel, ProcessInstanceEventType
 import decimal
 import json
 import logging
@@ -25,6 +23,7 @@ from uuid import UUID
 import dateparser
 import pytz
 from flask import current_app
+from flask import g
 from lxml import etree  # type: ignore
 from lxml.etree import XMLSyntaxError  # type: ignore
 from RestrictedPython import safe_globals  # type: ignore
@@ -75,6 +74,8 @@ from spiffworkflow_backend.models.message_instance_correlation import (
 )
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
+from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.process_instance_metadata import (
     ProcessInstanceMetadataModel,
 )
@@ -1240,6 +1241,7 @@ class ProcessInstanceProcessor:
     def manual_complete_task(self, task_id: str, execute: bool) -> None:
         """Mark the task complete optionally executing it."""
         spiff_task = self.bpmn_process_instance.get_task(UUID(task_id))
+        event_type = ProcessInstanceEventType.task_skipped.value
         if execute:
             current_app.logger.info(
                 f"Manually executing Task {spiff_task.task_spec.name} of process"
@@ -1255,6 +1257,7 @@
                         break
             else:
                 spiff_task.complete()
+                event_type = ProcessInstanceEventType.task_executed_manually.value
         else:
             spiff_logger = logging.getLogger("spiff")
             spiff_logger.info(f"Skipped task {spiff_task.task_spec.name}", extra=spiff_task.log_info())
@@ -1275,6 +1278,7 @@
         self.increment_spiff_step()
         self.add_step()
+        self.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id)
         self.save()
         # Saving the workflow seems to reset the status
         self.suspend()
@@ -1813,8 +1817,9 @@
             json_data = JsonDataModel(**json_data_dict)
             db.session.add(json_data)
-        self.add_event_to_process_instance(self.process_instance_model,
-                                           ProcessInstanceEventType.task_completed.value, task_guid=task_model.guid)
+        self.add_event_to_process_instance(
+            self.process_instance_model, ProcessInstanceEventType.task_completed.value, task_guid=task_model.guid
+        )
         # this is the thing that actually commits the db transaction (on behalf of the other updates above as well)
         self.save()
@@ -1940,33 +1945,39 @@
         self.save()
         self.process_instance_model.status = "terminated"
         db.session.add(self.process_instance_model)
-        self.add_event_to_process_instance(self.process_instance_model,
-                                           ProcessInstanceEventType.process_instance_terminated.value)
+        self.add_event_to_process_instance(
+            self.process_instance_model, ProcessInstanceEventType.process_instance_terminated.value
+        )
         db.session.commit()

     def suspend(self) -> None:
         """Suspend."""
         self.process_instance_model.status = ProcessInstanceStatus.suspended.value
         db.session.add(self.process_instance_model)
-        self.add_event_to_process_instance(self.process_instance_model,
-                                           ProcessInstanceEventType.process_instance_suspended.value)
+        self.add_event_to_process_instance(
+            self.process_instance_model, ProcessInstanceEventType.process_instance_suspended.value
+        )
         db.session.commit()

     def resume(self) -> None:
         """Resume."""
         self.process_instance_model.status = ProcessInstanceStatus.waiting.value
         db.session.add(self.process_instance_model)
-        self.add_event_to_process_instance(self.process_instance_model,
-                                           ProcessInstanceEventType.process_instance_resumed.value)
+        self.add_event_to_process_instance(
+            self.process_instance_model, ProcessInstanceEventType.process_instance_resumed.value
+        )
         db.session.commit()

     @classmethod
-    def add_event_to_process_instance(cls, process_instance: ProcessInstanceModel, event_type: str, task_guid: Optional[str] = None) -> None:
+    def add_event_to_process_instance(
+        cls, process_instance: ProcessInstanceModel, event_type: str, task_guid: Optional[str] = None
+    ) -> None:
         user_id = None
         if g.user:
             user_id = g.user.id
         process_instance_event = ProcessInstanceEventModel(
-            process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id)
+            process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id
+        )
         if task_guid:
             process_instance_event.task_guid = task_guid
         db.session.add(process_instance_event)
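Because each of these helpers only stages the ProcessInstanceEventModel row and leaves committing to the caller, the audit trail can be read back with an ordinary query afterwards. A small example using the same ordering the process_instance_log_list route applies above; the column names come from this diff, everything else is illustrative:

    from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel

    # Read back the events recorded for one process instance, newest first.
    events = (
        ProcessInstanceEventModel.query.filter_by(process_instance_id=process_instance.id)
        .order_by(ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc())
        .all()
    )
    for event in events:
        print(event.event_type, event.task_guid, event.user_id)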

View File

@@ -60,8 +60,8 @@ class TaskService:
         python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
         task_model.properties_json = new_properties_json
         task_model.state = TaskStateNames[new_properties_json["state"]]
-        json_data_dict = cls._update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash")
-        python_env_dict = cls._update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash")
+        json_data_dict = cls.update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash")
+        python_env_dict = cls.update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash")
         return [json_data_dict, python_env_dict]

     @classmethod
@@ -246,14 +246,14 @@
             task_model.properties_json = task_properties
             new_task_models[task_model.guid] = task_model
-            json_data_dict = TaskService._update_task_data_on_task_model(
+            json_data_dict = TaskService.update_task_data_on_task_model(
                 task_model, task_data_dict, "json_data_hash"
             )
             if json_data_dict is not None:
                 new_json_data_dicts[json_data_dict["hash"]] = json_data_dict
             python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
-            python_env_dict = TaskService._update_task_data_on_task_model(
+            python_env_dict = TaskService.update_task_data_on_task_model(
                 task_model, python_env_data_dict, "python_env_data_hash"
             )
             if python_env_dict is not None:
@@ -274,7 +274,7 @@
         return json_data_dict

     @classmethod
-    def _update_task_data_on_task_model(
+    def update_task_data_on_task_model(
         cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str
     ) -> Optional[JsonDataDict]:
         task_data_json = json.dumps(task_data_dict, sort_keys=True)
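Only the signature and first statement of update_task_data_on_task_model appear in this diff, but the way callers consume its return value (storing it under its "hash" key and unpacking it into JsonDataModel) suggests it hashes the serialized task data, writes the hash onto the named column, and returns the hash/data pair only when something changed. A speculative, self-contained sketch of that behavior, written as a free function for illustration:

    import hashlib
    import json
    from typing import Any
    from typing import Optional

    # Speculative sketch -- the real method is a TaskService classmethod; its body is not shown here.
    def update_task_data_on_task_model(task_model: Any, task_data_dict: dict, task_model_data_column: str) -> Optional[dict]:
        task_data_json = json.dumps(task_data_dict, sort_keys=True)
        task_data_hash = hashlib.sha256(task_data_json.encode("utf8")).hexdigest()
        if getattr(task_model, task_model_data_column) == task_data_hash:
            return None  # unchanged, so callers skip creating a JsonDataModel row
        setattr(task_model, task_model_data_column, task_data_hash)
        return {"hash": task_data_hash, "data": task_data_dict}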

View File

@@ -154,7 +154,11 @@ class TaskModelSavingDelegate(EngineStepDelegate):
             # which script tasks execute when READY.
             timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time()
             process_instance_event = ProcessInstanceEventModel(
-                task_guid=task_model.guid, process_instance_id=self.process_instance.id, event_type=event_type, timestamp=timestamp)
+                task_guid=task_model.guid,
+                process_instance_id=self.process_instance.id,
+                event_type=event_type,
+                timestamp=timestamp,
+            )
             self.process_instance_events[task_model.guid] = process_instance_event
         return task_model

View File

@@ -381,40 +381,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
           </Column>
         </Grid>
         {lastUpdatedTimeTag}
-        {/*
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Suspended at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:12:05 (by jason)
-          </Column>
-        </Grid>
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Resumed at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:13:05 (by jason)
-          </Column>
-        </Grid>
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Suspended at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:14:05 (by jason)
-          </Column>
-        </Grid>
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Terminated at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:15:05 (by jason)
-          </Column>
-        </Grid>
-        */}
         <Grid condensed fullWidth>
           <Column sm={1} md={1} lg={2} className="grid-list-title">
             Process model revision:{' '}
@@ -434,43 +400,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
             </Tag>
           </Column>
         </Grid>
-        {/*
-        <br />
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Suspended at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:12:05 (by jason)
-          </Column>
-        </Grid>
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Resumed at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:13:05 (by jason)
-          </Column>
-        </Grid>
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Suspended at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:14:05 (by jason)
-          </Column>
-        </Grid>
-        <Grid condensed fullWidth>
-          <Column sm={1} md={1} lg={2} className="grid-list-title">
-            Terminated at:{' '}
-          </Column>
-          <Column sm={3} md={3} lg={3} className="grid-date">
-            2023-03-17 10:15:05 (by jason)
-          </Column>
-        </Grid>
-        */}
         <br />
         <Grid condensed fullWidth>
           <Column sm={2} md={2} lg={2}>