removed spiff_logging w/ burnettk

jasquat 2023-03-17 15:49:16 -04:00
parent a565b96cba
commit 714a2aa2c1
No known key found for this signature in database
6 changed files with 0 additions and 191 deletions

View File

@@ -41,7 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import (
) # noqa: F401
from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401
from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel # noqa: F401
from spiffworkflow_backend.models.spiff_step_details import (
    SpiffStepDetailsModel,
) # noqa: F401

View File

@@ -1,25 +0,0 @@
"""Spiff_logging."""
from dataclasses import dataclass
from typing import Optional
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@dataclass
class SpiffLoggingModel(SpiffworkflowBaseDBModel):
"""SpiffLoggingModel."""
__tablename__ = "spiff_logging"
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(db.Integer, nullable=False)
bpmn_process_identifier: str = db.Column(db.String(255), nullable=False)
bpmn_process_name: Optional[str] = db.Column(db.String(255), nullable=True)
bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
bpmn_task_name: str = db.Column(db.String(255), nullable=True)
bpmn_task_type: str = db.Column(db.String(255), nullable=True)
spiff_task_guid: str = db.Column(db.String(50), nullable=False)
timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
message: Optional[str] = db.Column(db.String(255), nullable=True)
current_user_id: int = db.Column(db.Integer, nullable=True)
spiff_step: int = db.Column(db.Integer, nullable=False)
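
For orientation, here is a minimal sketch (hypothetical helper, not part of this commit) of how rows from the model above were typically read back with Flask-SQLAlchemy before the removal; it assumes an active application context. After this commit, the log listing endpoint filters on TaskModel and TaskDefinitionModel instead, as the process_instance_log_list hunk further down shows.

# Hypothetical helper for illustration only; assumes a Flask app context and the (now removed) model above.
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel


def recent_logs_for_instance(process_instance_id: int, limit: int = 100) -> list[SpiffLoggingModel]:
    """Return the newest spiff_logging rows for one process instance, newest first."""
    return (
        SpiffLoggingModel.query.filter_by(process_instance_id=process_instance_id)
        .order_by(SpiffLoggingModel.timestamp.desc())  # timestamp column is DECIMAL(17, 6)
        .limit(limit)
        .all()
    )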

View File

@@ -42,7 +42,6 @@ from spiffworkflow_backend.models.process_instance_report import (
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskModel
@@ -251,14 +250,6 @@ def process_instance_log_list(
    )
    if not detailed:
        log_query = log_query.filter(
            # 1. this was the previous implementation, where we only show completed tasks and skipped tasks.
            #    maybe we want to iterate on this in the future (in a third tab under process instance logs?)
            # or_(
            #     SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
            #     SpiffLoggingModel.message.like("Skipped task %"),  # type: ignore
            # )
            # 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023
            #    we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities.
            and_(
                TaskModel.state.in_(["COMPLETED"]),  # type: ignore
                TaskDefinitionModel.typename.in_(["IntermediateThrowEvent"]),  # type: ignore
@@ -458,7 +449,6 @@ def process_instance_delete(
    # (Pdb) db.session.delete
    # <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
    db.session.query(SpiffLoggingModel).filter_by(process_instance_id=process_instance.id).delete()
    db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete()
    db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete()
    db.session.delete(process_instance)
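
For context on the pattern above: db.session.delete(process_instance) cascades to child rows only when the ORM relationships (or database foreign keys) are configured for it, so the route clears each dependent table first with bulk Query.delete() calls, each of which issues one DELETE statement per table and skips Python-side relationship cascades. A rough sketch of that cleanup loop (hypothetical helper; the ProcessInstanceQueueModel import path is an assumption, the others appear in this diff):

# Hypothetical sketch, not part of this diff.
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance_queue import ProcessInstanceQueueModel  # assumed path
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel


def delete_dependent_rows(process_instance_id: int) -> None:
    """Bulk-delete child rows that would otherwise block deleting the process instance row."""
    for model in (SpiffStepDetailsModel, ProcessInstanceQueueModel):
        db.session.query(model).filter_by(process_instance_id=process_instance_id).delete()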

View File

@@ -9,10 +9,6 @@ from typing import Optional
from flask import g
from flask.app import Flask
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.task import Task
# flask logging formats:
# from: https://www.askpython.com/python-modules/flask/flask-logging
@@ -181,79 +177,3 @@ def setup_logger(app: Flask) -> None:
        for the_handler in the_logger.handlers:
            the_handler.setFormatter(log_formatter)
            the_handler.setLevel(log_level)

    spiff_logger = logging.getLogger("spiff")
    spiff_logger.setLevel(spiff_log_level)
    spiff_formatter = logging.Formatter(
        "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s |"
        " %(process)s | %(processName)s | %(process_instance_id)s"
    )

    # if you add a handler to spiff, it will be used/inherited by spiff.metrics
    # if you add a filter to the spiff logger directly (and not the handler), it will NOT be inherited by spiff.metrics
    # so put filters on handlers.
    db_handler = DBHandler()
    db_handler.setLevel(spiff_log_level)
    db_handler.setFormatter(spiff_formatter)
    db_handler.addFilter(SpiffFilter(app))
    spiff_logger.addHandler(db_handler)


# https://9to5answer.com/python-logging-to-database
class DBHandler(logging.Handler):
    """DBHandler."""

    def __init__(self) -> None:
        """__init__."""
        self.logs: list[dict] = []
        super().__init__()

    def bulk_insert_logs(self) -> None:
        """Bulk_insert_logs."""
        db.session.bulk_insert_mappings(SpiffLoggingModel, self.logs)
        db.session.commit()
        self.logs = []

    def emit(self, record: logging.LogRecord) -> None:
        """Emit."""
        # if we do not have a process instance id then do not log and assume we are running a script unit test
        # that initializes a BpmnWorkflow without a process instance
        if record and record.process_instance_id:  # type: ignore
            bpmn_process_identifier = record.workflow_spec  # type: ignore
            bpmn_process_name = record.workflow_name  # type: ignore
            spiff_task_guid = str(record.task_id)  # type: ignore
            bpmn_task_identifier = str(record.task_spec)  # type: ignore
            bpmn_task_name = record.task_name if hasattr(record, "task_name") else None  # type: ignore
            bpmn_task_type = record.task_type if hasattr(record, "task_type") else None  # type: ignore
            timestamp = record.created
            message = record.msg if hasattr(record, "msg") else None
            current_user_id = None
            if bpmn_task_type in Task.HUMAN_TASK_TYPES and hasattr(record, "current_user_id"):
                current_user_id = record.current_user_id  # type: ignore
            spiff_step = (
                record.spiff_step  # type: ignore
                if hasattr(record, "spiff_step") and record.spiff_step is not None  # type: ignore
                else 1
            )
            self.logs.append(
                {
                    "process_instance_id": record.process_instance_id,  # type: ignore
                    "bpmn_process_identifier": bpmn_process_identifier,
                    "bpmn_process_name": bpmn_process_name,
                    "spiff_task_guid": spiff_task_guid,
                    "bpmn_task_name": bpmn_task_name,
                    "bpmn_task_identifier": bpmn_task_identifier,
                    "bpmn_task_type": bpmn_task_type,
                    "message": message,
                    "timestamp": timestamp,
                    "current_user_id": current_user_id,
                    "spiff_step": spiff_step,
                }
            )
            # so at some point we are going to insert logs.
            # we don't want to insert on every log, so we will insert every 100 logs, which is just about as fast as inserting
            # on every 1,000 logs. if we get deadlocks in the database, this can be changed to 1 in order to insert on every log.
            if len(self.logs) >= 100:
                self.bulk_insert_logs()
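
The comment above about putting filters on handlers rather than on the spiff logger matches how the standard library propagates records: a record logged on a child logger such as spiff.metrics is never run through the parent logger's own filters, but it does pass through the filters of every handler it reaches. A minimal stdlib-only sketch of that difference (illustrative, not project code):

import logging


class DropEverything(logging.Filter):
    """A filter that rejects every record, to make the propagation behavior visible."""

    def filter(self, record: logging.LogRecord) -> bool:
        return False


handler = logging.StreamHandler()
spiff_logger = logging.getLogger("spiff")
spiff_logger.setLevel(logging.DEBUG)
spiff_logger.addHandler(handler)

# A filter on the parent logger is consulted only for records logged directly on "spiff",
# so this warning from the child logger still reaches the handler and is printed.
spiff_logger.addFilter(DropEverything())
logging.getLogger("spiff.metrics").warning("printed: parent logger filters are skipped on propagation")

# A filter on the handler is consulted for every record the handler receives,
# so the same warning is now dropped.
handler.addFilter(DropEverything())
logging.getLogger("spiff.metrics").warning("not printed: the handler filter rejects it")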

View File

@@ -3,16 +3,11 @@ from typing import Optional
from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.process_instance_file_data import (
    ProcessInstanceFileDataModel,
)
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
@@ -194,31 +189,3 @@ class TestProcessInstanceService(BaseTest):
            ],
            "not_a_file3": "just a value3",
        }

    def test_does_not_log_set_data_when_calling_engine_steps_on_waiting_call_activity(
        self,
        app: Flask,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_does_not_log_set_data_when_calling_engine_steps_on_waiting_call_activity."""
        process_model = load_test_spec(
            process_model_id="test_group/call-activity-to-human-task",
            process_model_source_directory="call-activity-to-human-task",
        )
        process_instance = self.create_process_instance_from_process_model(
            process_model=process_model, user=with_super_admin_user
        )
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)
        process_instance_logs = SpiffLoggingModel.query.filter_by(process_instance_id=process_instance.id).all()
        initial_length = len(process_instance_logs)

        # ensure we have something in the logs
        assert initial_length > 0

        # logs should NOT increase after running this a second time since it's just waiting on a human task
        processor.do_engine_steps(save=True)
        process_instance_logs = SpiffLoggingModel.query.filter_by(process_instance_id=process_instance.id).all()
        assert len(process_instance_logs) == initial_length

View File

@@ -1,42 +0,0 @@
"""Process Model."""
from decimal import Decimal
from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
class TestSpiffLogging(BaseTest):
"""TestSpiffLogging."""
def test_timestamps_are_stored_correctly(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
"""Test_timestamps_are_stored_correctly."""
process_model = load_test_spec(
"call_activity_test",
process_model_source_directory="call_activity_same_directory",
)
process_instance = self.create_process_instance_from_process_model(process_model)
bpmn_process_identifier = "test_process_identifier"
spiff_task_guid = "test_spiff_task_guid"
bpmn_task_identifier = "test_bpmn_task_identifier"
timestamp = 1663250624.664887 # actual timestamp from spiff logs
message = "test_message"
spiff_log = SpiffLoggingModel(
process_instance_id=process_instance.id,
bpmn_process_identifier=bpmn_process_identifier,
spiff_task_guid=spiff_task_guid,
bpmn_task_identifier=bpmn_task_identifier,
message=message,
timestamp=timestamp,
spiff_step=1,
)
assert spiff_log.timestamp == timestamp
db.session.add(spiff_log)
db.session.commit()
assert spiff_log.timestamp == Decimal(str(timestamp))
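
The last assertion compares against Decimal(str(timestamp)) rather than Decimal(timestamp) because a float converted through its string repr keeps only the digits shown (here six fractional digits, which is what the DECIMAL(17, 6) column stores), while Decimal(float) captures the exact binary value of the double with many more digits and would not compare equal to the value read back from the database. A quick stdlib illustration (not part of this diff):

from decimal import Decimal

timestamp = 1663250624.664887  # the same sample value the removed test uses

# str() gives the short repr, so this Decimal has exactly six fractional digits,
# matching what a DECIMAL(17, 6) column hands back.
print(Decimal(str(timestamp)))  # 1663250624.664887

# Decimal(float) converts the underlying double exactly, producing many more digits,
# which is why it would not equal the round-tripped column value.
print(Decimal(timestamp))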