celery logging (#1295)

* some initial code to handle celery logs better w/ burnettk

* some more work to get celery logs formatting properly w/ burnettk

* added logger setup for celery w/ burnettk

* use the same logger setup method for both celery and the flask server w/ burnettk

* move the log formatter to own method w/ burnettk

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
jasquat 2024-03-28 18:48:44 +00:00 committed by GitHub
parent 8fe75cd320
commit 13d7fa8968
3 changed files with 57 additions and 28 deletions
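
The mechanism at the heart of the change is Celery's after_setup_logger signal, which fires once the worker has configured its logging; a connected handler can then replace Celery's default handlers with ones that match the Flask server's. A minimal standalone sketch of that hookup, assuming celery is installed (the handler name is illustrative; the format string is the plain one from the diff):

    import logging
    import sys
    from typing import Any

    from celery.signals import after_setup_logger


    @after_setup_logger.connect
    def use_flask_style_formatting(logger: Any, *args: Any, **kwargs: Any) -> None:
        # drop celery's default handlers and attach a stdout handler that
        # uses the same format string the flask server uses
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(formatter)
        logger.handlers = [stdout_handler]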

View File

@@ -1,7 +1,28 @@
+import logging
+import sys
+from typing import Any
+
+import celery
+
 from spiffworkflow_backend import create_app
+
 # we need to import tasks from this file so they can be used elsewhere in the app
 from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task import (
     celery_task_process_instance_run,  # noqa: F401
 )
+from spiffworkflow_backend.services.logging_service import get_log_formatter
+from spiffworkflow_backend.services.logging_service import setup_logger_for_app
+
 the_flask_app = create_app()
 setting_variable_to_make_celery_happy_no_idea_how_this_works = the_flask_app.celery_app
+
+
+@celery.signals.after_setup_logger.connect  # type: ignore
+def setup_loggers(logger: Any, *args: Any, **kwargs: Any) -> None:
+    log_formatter = get_log_formatter(the_flask_app)
+    logger.handlers = []
+    stdout_handler = logging.StreamHandler(sys.stdout)
+    stdout_handler.setFormatter(log_formatter)
+    logger.addHandler(stdout_handler)
+    setup_logger_for_app(the_flask_app, logger)

View File

@@ -1,6 +1,7 @@
 """__init__.py."""
 import base64
+import logging
 import os
 import threading
 import uuid
@@ -9,7 +10,7 @@ from urllib.parse import urlparse
 from flask.app import Flask
 from werkzeug.utils import ImportStringError

-from spiffworkflow_backend.services.logging_service import setup_logger
+from spiffworkflow_backend.services.logging_service import setup_logger_for_app

 HTTP_REQUEST_TIMEOUT_SECONDS = 15
 CONNECTOR_PROXY_COMMAND_TIMEOUT = 45
@@ -216,7 +217,7 @@ def setup_config(app: Flask) -> None:
     app.config["PROCESS_UUID"] = uuid.uuid4()

     setup_database_configs(app)
-    setup_logger(app)
+    setup_logger_for_app(app, logging)
     app.logger.debug(
         f"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR: {app.config['SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR']}"
     )
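
Note the second argument at the call site above: the Flask side passes the logging module itself as the logger parameter. That works because everything setup_logger_for_app touches (level constants, getLogger, handler classes, the root logger's registry) exists at module level, as this quick check shows (a sketch, not code from the diff):

    import logging

    print(getattr(logging, "DEBUG"))                  # 10, the numeric level
    print(logging.getLogger("demo"))                  # <Logger demo (WARNING)>
    print(type(logging.StreamHandler()))              # handler classes live on the module
    print(list(logging.root.manager.loggerDict)[:3])  # registry of known logger names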

View File

@@ -83,39 +83,23 @@ class JsonFormatter(logging.Formatter):
         return json.dumps(message_dict, default=str)


-def setup_logger(app: Flask) -> None:
+def setup_logger_for_app(app: Flask, logger: Any) -> None:
     upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper()
     log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]

     if upper_log_level_string not in log_levels:
         raise InvalidLogLevelError(f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}")

-    log_level = getattr(logging, upper_log_level_string)
-    spiff_log_level = getattr(logging, upper_log_level_string)
-    log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+    log_level = getattr(logger, upper_log_level_string)
+    spiff_log_level = getattr(logger, upper_log_level_string)
+    log_formatter = get_log_formatter(app)

     app.logger.debug("Printing log to create app logger")

-    # the json formatter is nice for real environments but makes
-    # debugging locally a little more difficult
-    if app.config["ENV_IDENTIFIER"] != "local_development":
-        json_formatter = JsonFormatter(
-            {
-                "level": "levelname",
-                "message": "message",
-                "loggerName": "name",
-                "processName": "processName",
-                "processID": "process",
-                "threadName": "threadName",
-                "threadID": "thread",
-                "timestamp": "asctime",
-            }
-        )
-        log_formatter = json_formatter
-
     spiff_logger_filehandler = None
     if app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"]:
-        spiff_logger_filehandler = logging.FileHandler(f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log")
+        spiff_logger_filehandler = logger.FileHandler(f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log")
         spiff_logger_filehandler.setLevel(spiff_log_level)
         spiff_logger_filehandler.setFormatter(log_formatter)
@@ -143,10 +127,10 @@ def setup_logger(app: Flask) -> None:
         loggers_to_exclude_from_debug.append("sqlalchemy")

     # make all loggers act the same
-    for name in logging.root.manager.loggerDict:
+    for name in logger.root.manager.loggerDict:
         # use a regex so spiffworkflow_backend isn't filtered out
         if not re.match(r"^spiff\b", name):
-            the_logger = logging.getLogger(name)
+            the_logger = logger.getLogger(name)
             the_logger.setLevel(log_level)
             if spiff_logger_filehandler:
                 the_logger.handlers = []
@@ -168,9 +152,32 @@ def setup_logger(app: Flask) -> None:
            if exclude_logger_name_from_debug:
                the_logger.setLevel("INFO")

-           if not exclude_logger_name_from_logging:
-               the_logger.addHandler(logging.StreamHandler(sys.stdout))
+           if exclude_logger_name_from_logging:
+               the_logger.setLevel("ERROR")
+           the_logger.addHandler(logger.StreamHandler(sys.stdout))

            for the_handler in the_logger.handlers:
                the_handler.setFormatter(log_formatter)
                the_handler.setLevel(log_level)
+
+
+def get_log_formatter(app: Flask) -> logging.Formatter:
+    log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+
+    # the json formatter is nice for real environments but makes
+    # debugging locally a little more difficult
+    if app.config["ENV_IDENTIFIER"] != "local_development":
+        json_formatter = JsonFormatter(
+            {
+                "level": "levelname",
+                "message": "message",
+                "loggerName": "name",
+                "processName": "processName",
+                "processID": "process",
+                "threadName": "threadName",
+                "threadID": "thread",
+                "timestamp": "asctime",
+            }
+        )
+        log_formatter = json_formatter
+
+    return log_formatter
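
For reference, the mapping passed to JsonFormatter above pairs output keys with LogRecord attribute names. A stripped-down stand-in showing how such a formatter behaves (this mimics, but is not, the class defined earlier in logging_service):

    import json
    import logging


    class MiniJsonFormatter(logging.Formatter):
        def __init__(self, fmt_dict: dict[str, str]) -> None:
            super().__init__()
            self.fmt_dict = fmt_dict

        def format(self, record: logging.LogRecord) -> str:
            # resolve the computed attributes first, then translate output
            # keys to LogRecord attribute names via the mapping dict
            record.message = record.getMessage()
            record.asctime = self.formatTime(record)
            message_dict = {key: getattr(record, attr, None) for key, attr in self.fmt_dict.items()}
            return json.dumps(message_dict, default=str)


    formatter = MiniJsonFormatter({"level": "levelname", "message": "message", "timestamp": "asctime"})
    record = logging.LogRecord("demo", logging.INFO, __file__, 1, "hello", None, None)
    print(formatter.format(record))  # {"level": "INFO", "message": "hello", "timestamp": "..."}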