From 13d7fa89682cc2e3784cc88a165f90b233c56ec8 Mon Sep 17 00:00:00 2001
From: jasquat <2487833+jasquat@users.noreply.github.com>
Date: Thu, 28 Mar 2024 18:48:44 +0000
Subject: [PATCH] celery logging (#1295)

* some initial code to handle celery logs better w/ burnettk

* some more work to get celery logs properly formatting w/ burnettk

* added logger setup for celery w/ burnettk

* added logger setup for celery w/ burnettk

* use the same logger setup method for both celery and the flask server w/ burnettk

* move the log formatter to own method w/ burnettk

---------

Co-authored-by: jasquat
---
 .../background_processing/celery_worker.py   | 21 +++++++
 .../spiffworkflow_backend/config/__init__.py |  5 +-
 .../services/logging_service.py              | 59 +++++++++++--------
 3 files changed, 57 insertions(+), 28 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/background_processing/celery_worker.py b/spiffworkflow-backend/src/spiffworkflow_backend/background_processing/celery_worker.py
index c63d1c043..0bbda8869 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/background_processing/celery_worker.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/background_processing/celery_worker.py
@@ -1,7 +1,28 @@
+import logging
+import sys
+from typing import Any
+
+import celery
+
 from spiffworkflow_backend import create_app
+
+# we need to import tasks from this file so they can be used elsewhere in the app
 from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task import (
     celery_task_process_instance_run,  # noqa: F401
 )
+from spiffworkflow_backend.services.logging_service import get_log_formatter
+from spiffworkflow_backend.services.logging_service import setup_logger_for_app
 
 the_flask_app = create_app()
+
 setting_variable_to_make_celery_happy_no_idea_how_this_works = the_flask_app.celery_app
+
+
+@celery.signals.after_setup_logger.connect  # type: ignore
+def setup_loggers(logger: Any, *args: Any, **kwargs: Any) -> None:
+    log_formatter = get_log_formatter(the_flask_app)
+    logger.handlers = []
+    stdout_handler = logging.StreamHandler(sys.stdout)
+    stdout_handler.setFormatter(log_formatter)
+    logger.addHandler(stdout_handler)
+    setup_logger_for_app(the_flask_app, logger)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py
index d81a37ff4..7656fb5a4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py
@@ -1,6 +1,7 @@
 """__init__.py."""
 
 import base64
+import logging
 import os
 import threading
 import uuid
@@ -9,7 +10,7 @@ from urllib.parse import urlparse
 from flask.app import Flask
 from werkzeug.utils import ImportStringError
 
-from spiffworkflow_backend.services.logging_service import setup_logger
+from spiffworkflow_backend.services.logging_service import setup_logger_for_app
 
 HTTP_REQUEST_TIMEOUT_SECONDS = 15
 CONNECTOR_PROXY_COMMAND_TIMEOUT = 45
@@ -216,7 +217,7 @@ def setup_config(app: Flask) -> None:
     app.config["PROCESS_UUID"] = uuid.uuid4()
 
     setup_database_configs(app)
-    setup_logger(app)
+    setup_logger_for_app(app, logging)
     app.logger.debug(
         f"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR: {app.config['SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR']}"
     )
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py
index dfd050750..d46368f55 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py
@@ -83,39 +83,23 @@ class JsonFormatter(logging.Formatter):
         return json.dumps(message_dict, default=str)
 
 
-def setup_logger(app: Flask) -> None:
+def setup_logger_for_app(app: Flask, logger: Any) -> None:
     upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper()
     log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
 
     if upper_log_level_string not in log_levels:
         raise InvalidLogLevelError(f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}")
 
-    log_level = getattr(logging, upper_log_level_string)
-    spiff_log_level = getattr(logging, upper_log_level_string)
-    log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+    log_level = getattr(logger, upper_log_level_string)
+    spiff_log_level = getattr(logger, upper_log_level_string)
+
+    log_formatter = get_log_formatter(app)
 
     app.logger.debug("Printing log to create app logger")
 
-    # the json formatter is nice for real environments but makes
-    # debugging locally a little more difficult
-    if app.config["ENV_IDENTIFIER"] != "local_development":
-        json_formatter = JsonFormatter(
-            {
-                "level": "levelname",
-                "message": "message",
-                "loggerName": "name",
-                "processName": "processName",
-                "processID": "process",
-                "threadName": "threadName",
-                "threadID": "thread",
-                "timestamp": "asctime",
-            }
-        )
-        log_formatter = json_formatter
-
     spiff_logger_filehandler = None
     if app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"]:
-        spiff_logger_filehandler = logging.FileHandler(f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log")
+        spiff_logger_filehandler = logger.FileHandler(f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log")
         spiff_logger_filehandler.setLevel(spiff_log_level)
         spiff_logger_filehandler.setFormatter(log_formatter)
 
@@ -143,10 +127,10 @@ def setup_logger(app: Flask) -> None:
         loggers_to_exclude_from_debug.append("sqlalchemy")
 
     # make all loggers act the same
-    for name in logging.root.manager.loggerDict:
+    for name in logger.root.manager.loggerDict:
         # use a regex so spiffworkflow_backend isn't filtered out
         if not re.match(r"^spiff\b", name):
-            the_logger = logging.getLogger(name)
+            the_logger = logger.getLogger(name)
             the_logger.setLevel(log_level)
             if spiff_logger_filehandler:
                 the_logger.handlers = []
@@ -168,9 +152,32 @@ def setup_logger(app: Flask) -> None:
             if exclude_logger_name_from_debug:
                 the_logger.setLevel("INFO")
 
-            if not exclude_logger_name_from_logging:
-                the_logger.addHandler(logging.StreamHandler(sys.stdout))
+            if exclude_logger_name_from_logging:
+                the_logger.setLevel("ERROR")
+            the_logger.addHandler(logger.StreamHandler(sys.stdout))
 
             for the_handler in the_logger.handlers:
                 the_handler.setFormatter(log_formatter)
                 the_handler.setLevel(log_level)
+
+
+def get_log_formatter(app: Flask) -> logging.Formatter:
+    log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+
+    # the json formatter is nice for real environments but makes
+    # debugging locally a little more difficult
+    if app.config["ENV_IDENTIFIER"] != "local_development":
+        json_formatter = JsonFormatter(
+            {
+                "level": "levelname",
+                "message": "message",
+                "loggerName": "name",
+                "processName": "processName",
+                "processID": "process",
+                "threadName": "threadName",
+                "threadID": "thread",
+                "timestamp": "asctime",
+            }
+        )
+        log_formatter = json_formatter
+    return log_formatter
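
Reviewer note (not part of the patch): the celery_worker.py hunk above relies on Celery's after_setup_logger signal to swap the app's formatter onto the worker logger before any tasks run. Below is a minimal standalone sketch of that pattern, assuming only Celery and the stdlib; the app name, broker URL, and configure_worker_logging function are illustrative placeholders, not code from this repository.

import logging
import sys

from celery import Celery
from celery.signals import after_setup_logger

# placeholder app and broker, only for illustrating the signal hookup
celery_app = Celery("example_worker", broker="memory://")


@after_setup_logger.connect
def configure_worker_logging(logger: logging.Logger, *args, **kwargs) -> None:
    # replace Celery's default handlers so worker logs share one format with the web app
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(formatter)
    logger.handlers = [stdout_handler]

Running a worker against such an app (celery -A <module> worker) would emit records in the same "asctime - name - levelname - message" shape that get_log_formatter returns for local development in the diff above.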