add in missing fields to logs

jasquat 2023-03-16 17:58:43 -04:00
parent a2e659da87
commit 8bd946235c
4 changed files with 82 additions and 54 deletions

View File

@@ -1,5 +1,7 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import base64
+from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
+from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
 import json
 from typing import Any
 from typing import Dict
@@ -236,10 +238,15 @@ def process_instance_log_list(
     log_query = TaskModel.query.filter_by(process_instance_id=process_instance.id)
     logs = (
         log_query.order_by(TaskModel.end_in_seconds.desc())  # type: ignore
+        .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
+        .join(BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id)
         .outerjoin(HumanTaskModel, HumanTaskModel.task_model_id == TaskModel.id)
         .outerjoin(UserModel, UserModel.id == HumanTaskModel.completed_by_user_id)
         .add_columns(
+            TaskModel.guid.label('spiff_task_guid'),
             UserModel.username,
+            BpmnProcessDefinitionModel.bpmn_identifier.label('bpmn_process_definition_identifier'),
+            TaskDefinitionModel.bpmn_identifier.label('task_definition_identifier'),
         )
         .paginate(page=page, per_page=per_page, error_out=False)
     )
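
For orientation (this helper is not part of the commit): with add_columns(), each paginated row carries the TaskModel entity first, followed by the labeled columns, so a serializer can flatten a row into one log entry. A minimal sketch, assuming Flask-SQLAlchemy's Pagination API; row_to_log_entry is a hypothetical name, not code from this diff:

# Sketch only; row_to_log_entry is illustrative, not part of this commit.
# Each row in logs.items is a Row of (TaskModel, spiff_task_guid, username,
# bpmn_process_definition_identifier, task_definition_identifier).
def row_to_log_entry(row) -> dict:
    task_model = row[0]  # the TaskModel entity comes first in the row
    return {
        "process_instance_id": task_model.process_instance_id,
        "start_in_seconds": task_model.start_in_seconds,
        "end_in_seconds": task_model.end_in_seconds,
        "spiff_task_guid": row.spiff_task_guid,
        "username": row.username,  # None unless a completed human task matched the outer joins
        "bpmn_process_definition_identifier": row.bpmn_process_definition_identifier,
        "task_definition_identifier": row.task_definition_identifier,
    }

results = [row_to_log_entry(row) for row in logs.items]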

View File

@@ -78,7 +78,11 @@ class TaskModelSavingDelegate(EngineStepDelegate):
     def did_complete_task(self, spiff_task: SpiffTask) -> None:
         if self._should_update_task_model():
-            self._update_task_model_with_spiff_task(spiff_task)
+            task_model = self._update_task_model_with_spiff_task(spiff_task)
+            if self.current_task_start_in_seconds is None:
+                raise Exception("Could not find cached current_task_start_in_seconds. This should never have happened.")
+            task_model.start_in_seconds = self.current_task_start_in_seconds
+            task_model.end_in_seconds = time.time()
         if self.secondary_engine_step_delegate:
             self.secondary_engine_step_delegate.did_complete_task(spiff_task)
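
For context, did_complete_task consumes a current_task_start_in_seconds value that must be cached before the task executes. A sketch of that counterpart hook, which is not shown in this diff (the hook name and body are assumptions about the surrounding delegate):

# Assumed counterpart (not part of this hunk): cache the wall-clock start time
# right before the task runs so did_complete_task can stamp both timestamps.
def will_complete_task(self, spiff_task: SpiffTask) -> None:
    if self._should_update_task_model():
        self.current_task_start_in_seconds = time.time()
    if self.secondary_engine_step_delegate:
        self.secondary_engine_step_delegate.will_complete_task(spiff_task)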
@@ -117,7 +121,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
             if json_data_dict is not None:
                 self.json_data_dicts[json_data_dict["hash"]] = json_data_dict

-    def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask) -> None:
+    def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask) -> TaskModel:
         bpmn_process, task_model, new_task_models, new_json_data_dicts = (
             TaskService.find_or_create_task_model_from_spiff_task(
                 spiff_task,
@@ -137,6 +141,8 @@ class TaskModelSavingDelegate(EngineStepDelegate):
             json_data_dict_list.append(bpmn_process_json_data)
         self._update_json_data_dicts_using_list(json_data_dict_list)
+        return task_model


 class StepDetailLoggingDelegate(EngineStepDelegate):
     """Engine step delegate that takes care of logging spiff step details.

View File

@@ -1,9 +1,17 @@
 """Test_logging_service."""
 from flask.app import Flask
+from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from flask.testing import FlaskClient
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.process_instance_service import (
+    ProcessInstanceService,
+)
+from spiffworkflow_backend.services.process_instance_processor import (
+    ProcessInstanceProcessor,
+)


 class TestLoggingService(BaseTest):
@@ -16,58 +24,52 @@ class TestLoggingService(BaseTest):
         with_db_and_bpmn_file_cleanup: None,
         with_super_admin_user: UserModel,
     ) -> None:
         """Test_process_instance_run."""
-        process_group_id = "test_logging_spiff_logger"
-        process_model_id = "simple_script"
-        self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id)
-        process_model_identifier = f"{process_group_id}/{process_model_id}"
-        # create the model
-        self.create_process_model_with_api(
-            client=client,
-            process_model_id=process_model_identifier,
-            process_model_display_name="Simple Script",
-            process_model_description="Simple Script",
-            user=with_super_admin_user,
-        )
+        self.create_process_group(client, with_super_admin_user, "test_group", "test_group")
+        initiator_user = self.find_or_create_user("initiator_user")
+        assert initiator_user.principal is not None
+        AuthorizationService.import_permissions_from_yaml_file()
-        bpmn_file_name = "simple_script.bpmn"
-        bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, "simple_script")
-        # add bpmn to the model
-        self.create_spec_file(
-            client=client,
-            process_model_id=process_model_identifier,
-            file_name=bpmn_file_name,
-            file_data=bpmn_file_data_bytes,
-            user=with_super_admin_user,
+        process_model = load_test_spec(
+            process_model_id="misc/category_number_one/simple_form",
+            # bpmn_file_name="simp.bpmn",
+            process_model_source_directory="simple_form",
         )
+        process_instance = self.create_process_instance_from_process_model(
+            process_model=process_model, user=initiator_user
+        )
+        processor = ProcessInstanceProcessor(process_instance)
+        processor.do_engine_steps(save=True)
+        assert len(process_instance.active_human_tasks) == 1
+        human_task = process_instance.active_human_tasks[0]
+        assert len(human_task.potential_owners) == 1
+        assert human_task.potential_owners[0] == initiator_user
+        spiff_task = processor.__class__.get_task_by_bpmn_identifier(
+            human_task.task_name, processor.bpmn_process_instance
+        )
+        ProcessInstanceService.complete_form_task(processor, spiff_task, {"name": "HEY"}, initiator_user, human_task)

         headers = self.logged_in_headers(with_super_admin_user)
-        response = self.create_process_instance_from_process_model_id_with_api(
-            client, process_model_identifier, headers
-        )
-        assert response.json is not None
-        process_instance_id = response.json["id"]
-        response = client.post(
-            f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
-            headers=headers,
-        )
-        assert response.status_code == 200
         log_response = client.get(
-            f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?detailed=true",
+            f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}?detailed=true",
             headers=headers,
         )
         assert log_response.status_code == 200
         assert log_response.json
         logs: list = log_response.json["results"]
-        assert len(logs) == 8
-        print(f"logs[0]: {logs[0]}")
-        # for log in logs:
-        #     assert log["process_instance_id"] == process_instance_id
-        #     for key in [
-        #         "timestamp",
-        #         "spiff_task_guid",
-        #         "bpmn_task_identifier",
-        #         "bpmn_process_identifier",
-        #         "message",
-        #     ]:
-        #         assert key in log.keys()
+        assert len(logs) == 7

+        for log in logs:
+            assert log["process_instance_id"] == process_instance.id
+            for key in [
+                "start_in_seconds",
+                "end_in_seconds",
+                "spiff_task_guid",
+                "bpmn_process_definition_identifier",
+                "task_definition_identifier",
+            ]:
+                assert key in log.keys()
+            if log['task_definition_identifier'] == 'Activity_SimpleForm':
+                assert log['username'] == initiator_user.username
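
Put together, each entry in log_response.json["results"] should look roughly like the dict below. All values are invented for illustration; the test only asserts the keys and the username on the completed human task.

# Hypothetical log entry; values are made up, keys match the assertions above.
{
    "process_instance_id": 1,
    "start_in_seconds": 1679004000.0,
    "end_in_seconds": 1679004001.5,
    "spiff_task_guid": "3f5c9f2e-...",
    "bpmn_process_definition_identifier": "simple_form",  # invented identifier
    "task_definition_identifier": "Activity_SimpleForm",
    "username": "initiator_user",  # present only for completed human tasks
}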

View File

@@ -1,5 +1,7 @@
 """Test_process_instance_processor."""
 from uuid import UUID
+from spiffworkflow_backend.models import bpmn_process_definition
+from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 import pytest
 from flask import g
@@ -341,15 +343,18 @@ class TestProcessInstanceProcessor(BaseTest):
             expected_python_env_data = expected_task_data[spiff_task.task_spec.name]
             if spiff_task.task_spec.name in spiff_tasks_checked_once:
                 expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"]

-            task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
-            assert task.task_definition_id is not None
-            task_definition = task.task_definition
+            task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
+            assert task_model.start_in_seconds is not None
+            assert task_model.end_in_seconds is not None
+            assert task_model.task_definition_id is not None
+            task_definition = task_model.task_definition
             assert task_definition.bpmn_identifier == spiff_task_name
             assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier
-            message = f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task.json_data()}"
+            message = f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}"
             # TODO: if we split out env data again we will need to use it here instead of json_data
-            # assert task.python_env_data() == expected_python_env_data, message
-            assert task.json_data() == expected_python_env_data, message
+            # assert task_model.python_env_data() == expected_python_env_data, message
+            assert task_model.json_data() == expected_python_env_data, message
             spiff_tasks_checked_once.append(spiff_task.task_spec.name)

         all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks()
@@ -360,6 +365,14 @@ class TestProcessInstanceProcessor(BaseTest):
             assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess")
             assert_spiff_task_is_in_process("top_level_script", "top_level_process")

+            if spiff_task.task_spec.name == 'top_level_call_activity':
+                # the task id / guid of the call activity gets used as the guid of the bpmn process that it calls
+                bpmn_process = BpmnProcessModel.query.filter_by(guid=str(spiff_task.id)).first()
+                assert bpmn_process is not None
+                bpmn_process_definition = bpmn_process.bpmn_process_definition
+                assert bpmn_process_definition is not None
+                assert bpmn_process_definition.bpmn_identifier == 'test_process_to_call'

         assert processor.get_data() == fifth_data_set

     def test_does_not_recreate_human_tasks_on_multiple_saves(