Added the ability to include the process instance id in the Spiff logs (w/ burnettk)

This commit is contained in:
jasquat 2022-09-01 13:01:23 -04:00
parent 5907a3a4bf
commit f183e66fac
6 changed files with 43 additions and 41 deletions

6
poetry.lock generated
View File

@ -1846,8 +1846,8 @@ pytz = "*"
[package.source]
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "0fa2048ef2d9dd5fe856f2e476df77966bdab892"
reference = "feature/centralized-logging"
resolved_reference = "ea360a901fe35408885e3c9797af07a2c6ced812"
[[package]]
name = "sqlalchemy"
@ -2157,7 +2157,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "e395233463dec69fa7870815ff342096d7de3ea8899c54eac61ba334ff0b1c73"
content-hash = "22c1b45f95769cf86303de8139a7be931a85d87d9e3c3f826e3af201a388df82"
[metadata.files]
alabaster = [

View File

@ -27,7 +27,7 @@ flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/centralized-logging"}
# spiffworkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
sentry-sdk = "1.9.0"
sphinx-autoapi = "^1.8.4"

View File

@ -1,5 +1,6 @@
"""__init__.py."""
import os
import threading
from flask.app import Flask
from werkzeug.utils import ImportStringError
@ -58,3 +59,6 @@ def setup_config(app: Flask) -> None:
raise ModuleNotFoundError(
f"Cannot find config module: {env_config_module}"
) from exception
thread_local_data = threading.local()
app.config['THREAD_LOCAL_DATA'] = thread_local_data

View File

@ -24,7 +24,6 @@ from spiffworkflow_backend.models.process_instance import (
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
) # noqa: F401
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel # noqa: F401
from spiffworkflow_backend.models.task_event import TaskEventModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel # noqa: F401
from spiffworkflow_backend.models.user_group_assignment import (

View File

@ -3,6 +3,7 @@ import json
import logging
import os
import sys
import threading
from typing import Any
from typing import Optional
@ -83,25 +84,28 @@ class JsonFormatter(logging.Formatter):
return json.dumps(message_dict, default=str)
def setup_logger_for_sql_statements(app: "Flask") -> None:
    """Route sqlalchemy logging to a dedicated per-environment file.

    SQL statements land in ``log/db_<env>.log`` instead of the app's main
    handlers: propagation to the root logger is disabled so queries do not
    clutter the primary log stream.

    :param app: the Flask app whose ``env`` names the log file.
    """
    db_log_file_name = f"log/db_{app.env}.log"
    # logging.FileHandler raises FileNotFoundError when the parent
    # directory is missing, so make sure it exists before attaching.
    os.makedirs(os.path.dirname(db_log_file_name), exist_ok=True)

    # Handler filters at INFO; the logger itself runs at DEBUG so other
    # (future) handlers could capture more detail.
    db_handler = logging.FileHandler(db_log_file_name)
    db_handler.setLevel(logging.INFO)

    db_logger = logging.getLogger("sqlalchemy")
    db_logger.propagate = False  # keep SQL noise out of the root logger
    db_logger.addHandler(db_handler)
    db_logger.setLevel(logging.DEBUG)
class SpiffFilter(logging.Filter):
    """Stamp every log record with the active process instance id.

    The id is read from the app's ``THREAD_LOCAL_DATA`` (a
    ``threading.local``), which is set per-thread when a process instance
    starts being processed.  Records are never dropped by this filter.
    """

    def __init__(self, app: "Flask"):
        """Keep a reference to the app so config is reachable at log time."""
        self.app = app
        super().__init__()

    def filter(self, record: logging.LogRecord) -> bool:
        """Attach ``process_instance_id`` to *record*; always returns True."""
        tld = self.app.config['THREAD_LOCAL_DATA']
        # getattr with a default replaces the hasattr/if dance; '' means no
        # process instance is active on this thread.
        record.process_instance_id = getattr(tld, 'process_instance_id', '')
        return True
def setup_logger(app: Flask) -> None:
"""Setup_logger."""
log_level = logging.DEBUG
handlers = []
log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
log_formatter = logging._defaultFormatter # type: ignore
log_level = logging.DEBUG
log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# the json formatter is nice for real environments but makes
# debugging locally a little more difficult
@ -120,42 +124,33 @@ def setup_logger(app: Flask) -> None:
)
log_formatter = json_formatter
if os.environ.get("IS_GUNICORN") == "true":
gunicorn_logger_error = logging.getLogger("gunicorn.error")
log_level = gunicorn_logger_error.level
ghandler = gunicorn_logger_error.handlers[0]
ghandler.setFormatter(log_formatter)
handlers.append(ghandler)
gunicorn_logger_access = logging.getLogger("gunicorn.access")
log_level = gunicorn_logger_access.level
ghandler = gunicorn_logger_access.handlers[0]
ghandler.setFormatter(log_formatter)
handlers.append(ghandler)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(log_level)
handler.setFormatter(log_formatter)
handlers.append(handler)
logging.basicConfig(handlers=handlers)
setup_logger_for_sql_statements(app)
# make all loggers act the same
for name in logging.root.manager.loggerDict:
if 'spiff' not in name:
the_logger = logging.getLogger(name)
the_logger.setLevel(log_level)
for the_handler in the_logger.handlers:
the_handler.setFormatter(log_formatter)
the_handler.setLevel(log_level)
spiff_logger = logging.getLogger("spiff.metrics")
spiff_logger.setLevel(logging.DEBUG)
# spiff_logger_handler = logging.StreamHandler(sys.stdout)
spiff_formatter = logging.Formatter(
"%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s"
"%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s | %(process_instance_id)s"
)
# spiff_logger_handler.setFormatter(spiff_formatter)
# fh = logging.FileHandler('test.log')
# spiff_logger_handler.setLevel(logging.DEBUG)
# spiff_logger.addHandler(spiff_logger_handler)
# if you add a handler to spiff, it will be used/inherited by spiff.metrics
# if you add a filter to the spiff logger directly (and not the handler), it will NOT be inherited by spiff.metrics
# so put filters on handlers.
db_handler = DBHandler()
db_handler.setLevel(logging.DEBUG)
db_handler.setFormatter(spiff_formatter)
db_handler.addFilter(SpiffFilter(app))
spiff_logger.addHandler(db_handler)

View File

@ -1,5 +1,7 @@
"""Process_instance_processor."""
import json
import logging
import threading
import time
from typing import Any
from typing import Dict
@ -159,6 +161,8 @@ class ProcessInstanceProcessor:
self, process_instance_model: ProcessInstanceModel, validate_only: bool = False
) -> None:
"""Create a Workflow Processor based on the serialized information available in the process_instance model."""
current_app.config['THREAD_LOCAL_DATA'].process_instance_id = process_instance_model.id
self.process_instance_model = process_instance_model
self.process_model_service = ProcessModelService()
spec = None