"""Logging_service."""
import json
import logging
import re
import sys
from typing import Any
from typing import Optional

from flask.app import Flask


# flask logging formats:
# from: https://www.askpython.com/python-modules/flask/flask-logging
# %(asctime)s - The timestamp as a string.
# %(levelname)s - The logging level as a string.
# %(name)s - The logger name as a string.
# %(threadName)s - The thread name as a string.
# %(message)s - The log message.

# full message list:
# {'name': 'gunicorn.error', 'msg': 'GET /admin/token', 'args': (), 'levelname': 'DEBUG', 'levelno': 10, 'pathname': '~/.cache/pypoetry/virtualenvs/spiffworkflow-backend-R_hdWfN1-py3.10/lib/python3.10/site-packages/gunicorn/glogging.py', 'filename': 'glogging.py', 'module': 'glogging', 'exc_info': None, 'exc_text': None, 'stack_info': None, 'lineno': 267, 'funcName': 'debug', 'created': 1657307111.4513023, 'msecs': 451.30228996276855, 'relativeCreated': 1730.785846710205, 'thread': 139945864087360, 'threadName': 'MainThread', 'processName': 'MainProcess', 'process': 2109561, 'message': 'GET /admin/token', 'asctime': '2022-07-08T15:05:11.451Z'}
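#
# For illustration, a plain-text formatter built from these attributes (the same
# format string used as the local-development default further down) looks like:
#   logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")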


class InvalidLogLevelError(Exception):
    """InvalidLogLevelError."""


# originally from https://stackoverflow.com/a/70223539/6090676
class JsonFormatter(logging.Formatter):
    """Formatter that outputs JSON strings after parsing the LogRecord.

    @param dict fmt_dict: Key: logging format attribute pairs. Defaults to {"message": "message"}.
    @param str time_format: time.strftime() format string. Default: "%Y-%m-%dT%H:%M:%S"
    @param str msec_format: Millisecond formatting. Appended at the end. Default: "%s.%03dZ"
    """

    def __init__(
        self,
        fmt_dict: Optional[dict] = None,
        time_format: str = "%Y-%m-%dT%H:%M:%S",
        msec_format: str = "%s.%03dZ",
    ) -> None:
        """__init__."""
        self.fmt_dict = fmt_dict if fmt_dict is not None else {"message": "message"}
        self.default_time_format = time_format
        self.default_msec_format = msec_format
        self.datefmt = None

    def usesTime(self) -> bool:
        """Overridden to look for the attribute in the format dict values instead of the fmt string."""
        return "asctime" in self.fmt_dict.values()

    # we are overriding a method that returns a string and returning a dict, hence the Any
    def formatMessage(self, record: logging.LogRecord) -> Any:
        """Overridden to return a dictionary of the relevant LogRecord attributes instead of a string.

        KeyError is raised if an unknown attribute is provided in the fmt_dict.
        """
        return {fmt_key: record.__dict__[fmt_val] for fmt_key, fmt_val in self.fmt_dict.items()}

    def format(self, record: logging.LogRecord) -> str:
        """Mostly the same as the parent class's method.

        The difference being that a dict is manipulated and dumped as JSON instead of a string.
        """
        record.message = record.getMessage()

        if self.usesTime():
            record.asctime = self.formatTime(record, self.datefmt)

        message_dict = self.formatMessage(record)

        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway)
            if not record.exc_text:
                record.exc_text = self.formatException(record.exc_info)

        if record.exc_text:
            message_dict["exc_info"] = record.exc_text

        if record.stack_info:
            message_dict["stack_info"] = self.formatStack(record.stack_info)

        return json.dumps(message_dict, default=str)
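

# A minimal usage sketch (illustrative only; this module actually wires the
# formatter up inside setup_logger below, not like this):
#
#   logger = logging.getLogger("example")
#   logger.setLevel(logging.INFO)
#   handler = logging.StreamHandler(sys.stdout)
#   handler.setFormatter(JsonFormatter({"level": "levelname", "message": "message", "timestamp": "asctime"}))
#   logger.addHandler(handler)
#   logger.info("hello")
#
# which emits one JSON object per line, roughly:
#   {"level": "INFO", "message": "hello", "timestamp": "2022-07-08T15:05:11.451Z"}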


def setup_logger(app: Flask) -> None:
    """Setup_logger."""
    upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper()
    log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]

    if upper_log_level_string not in log_levels:
        raise InvalidLogLevelError(
            f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}"
        )

    log_level = getattr(logging, upper_log_level_string)
    spiff_log_level = getattr(logging, upper_log_level_string)
    log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    app.logger.debug("Printing log to create app logger")

    # the json formatter is nice for real environments but makes
    # debugging locally a little more difficult
    if app.config["ENV_IDENTIFIER"] != "local_development":
        json_formatter = JsonFormatter(
            {
                "level": "levelname",
                "message": "message",
                "loggerName": "name",
                "processName": "processName",
                "processID": "process",
                "threadName": "threadName",
                "threadID": "thread",
                "timestamp": "asctime",
            }
        )
        log_formatter = json_formatter
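
        # With the mapping above, a request log line renders roughly as (illustrative,
        # values abridged from the sample record near the top of this file):
        #   {"level": "DEBUG", "message": "GET /admin/token", "loggerName": "gunicorn.error",
        #    "processName": "MainProcess", "processID": 2109561, "threadName": "MainThread",
        #    "threadID": 139945864087360, "timestamp": "2022-07-08T15:05:11.451Z"}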

    spiff_logger_filehandler = None
    if app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"]:
        spiff_logger_filehandler = logging.FileHandler(
            f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log"
        )
        spiff_logger_filehandler.setLevel(spiff_log_level)
        spiff_logger_filehandler.setFormatter(log_formatter)

    # these loggers have been deemed too verbose to be useful
    garbage_loggers_to_exclude = ["connexion", "flask_cors.extension", "flask_cors.core", "sqlalchemy"]

    # if you actually want one of these excluded loggers, there is a config option to turn it on.
    # the value is a comma-separated string, so default to None rather than a list,
    # which would blow up on the split below.
    loggers_to_use = app.config.get("SPIFFWORKFLOW_BACKEND_LOGGERS_TO_USE")
    if loggers_to_use is None or loggers_to_use == "":
        loggers_to_use = []
    else:
        loggers_to_use = loggers_to_use.split(",")
    for logger_to_use in loggers_to_use:
        if logger_to_use in garbage_loggers_to_exclude:
            garbage_loggers_to_exclude.remove(logger_to_use)
        else:
            app.logger.warning(
                f"Logger '{logger_to_use}' not found in garbage_loggers_to_exclude. You do not need to add it to"
                " SPIFFWORKFLOW_BACKEND_LOGGERS_TO_USE."
            )
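
    # e.g. SPIFFWORKFLOW_BACKEND_LOGGERS_TO_USE="sqlalchemy,connexion" would re-enable
    # those two loggers (entries matching garbage_loggers_to_exclude are removed from it above).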

    loggers_to_exclude_from_debug = []

    if "sqlalchemy" not in garbage_loggers_to_exclude:
        loggers_to_exclude_from_debug.append("sqlalchemy")

    # make all loggers act the same
    for name in logging.root.manager.loggerDict:
        # use a regex so spiffworkflow_backend isn't filtered out
        if not re.match(r"^spiff\b", name):
            the_logger = logging.getLogger(name)
            the_logger.setLevel(log_level)
            if spiff_logger_filehandler:
                the_logger.handlers = []
                the_logger.propagate = False
                the_logger.addHandler(spiff_logger_filehandler)
            else:
                # it's very verbose, so only add handlers for the obscure loggers when log level is DEBUG
                if upper_log_level_string == "DEBUG":
                    if len(the_logger.handlers) < 1:
                        exclude_logger_name_from_logging = False
                        for garbage_logger in garbage_loggers_to_exclude:
                            if name.startswith(garbage_logger):
                                exclude_logger_name_from_logging = True

                        exclude_logger_name_from_debug = False
                        for logger_to_exclude_from_debug in loggers_to_exclude_from_debug:
                            if name.startswith(logger_to_exclude_from_debug):
                                exclude_logger_name_from_debug = True
                        if exclude_logger_name_from_debug:
                            the_logger.setLevel("INFO")

                        if not exclude_logger_name_from_logging:
                            the_logger.addHandler(logging.StreamHandler(sys.stdout))

                for the_handler in the_logger.handlers:
                    the_handler.setFormatter(log_formatter)
                    the_handler.setLevel(log_level)
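

# Typical call site (a sketch; the real Flask app factory lives elsewhere in the
# backend, and the inline config values here are assumed for illustration):
#
#   app = Flask(__name__)
#   app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"] = "info"
#   app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"] = False
#   app.config["ENV_IDENTIFIER"] = "local_development"
#   setup_logger(app)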