X-Git-Url: https://wannabe.guru.org/gitweb/?a=blobdiff_plain;ds=sidebyside;f=logging_utils.py;h=fdbb7a3d48daecb4e3b81ed4aad4bf0e11a79241;hb=31c81f6539969a5eba864d3305f9fb7bf716a367;hp=bf8d8b062b911507ccbd7f68f5346530c7bd0d79;hpb=36fea7f15ed17150691b5b3ead75450e575229ef;p=python_utils.git diff --git a/logging_utils.py b/logging_utils.py index bf8d8b0..fdbb7a3 100644 --- a/logging_utils.py +++ b/logging_utils.py @@ -8,23 +8,22 @@ import datetime import enum import io import logging -from logging.handlers import RotatingFileHandler, SysLogHandler import os import random import sys -from typing import Callable, Iterable, Mapping, Optional +from logging.config import fileConfig +from logging.handlers import RotatingFileHandler, SysLogHandler +from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional -from overrides import overrides import pytz +from overrides import overrides # This module is commonly used by others in here and should avoid # taking any unnecessary dependencies back on them. import argparse_utils import config -cfg = config.add_commandline_args( - f'Logging ({__file__})', 'Args related to logging' -) +cfg = config.add_commandline_args(f'Logging ({__file__})', 'Args related to logging') cfg.add_argument( '--logging_config_file', type=argparse_utils.valid_filename, @@ -180,7 +179,7 @@ logging_initialized = False # A map from logging_callsite_id -> count of logged messages. -squelched_logging_counts: Mapping[str, int] = {} +squelched_logging_counts: Dict[str, int] = {} def squelch_repeated_log_messages(squelch_after_n_repeats: int) -> Callable: @@ -224,7 +223,7 @@ class SquelchRepeatedMessagesFilter(logging.Filter): """ def __init__(self) -> None: - self.counters = collections.Counter() + self.counters: collections.Counter = collections.Counter() super().__init__() @overrides @@ -233,9 +232,7 @@ class SquelchRepeatedMessagesFilter(logging.Filter): if id1 not in squelched_logging_counts: return True threshold = squelched_logging_counts[id1] - logsite = ( - f'{record.pathname}+{record.lineno}+{record.levelno}+{record.msg}' - ) + logsite = f'{record.pathname}+{record.lineno}+{record.levelno}+{record.msg}' count = self.counters[logsite] self.counters[logsite] += 1 return count < threshold @@ -317,7 +314,7 @@ class DynamicPerScopeLoggingLevelFilter(logging.Filter): # A map from function_identifier -> probability of logging (0.0%..100.0%) -probabilistic_logging_levels: Mapping[str, float] = {} +probabilistic_logging_levels: Dict[str, float] = {} def logging_is_probabilistic(probability_of_logging: float) -> Callable: @@ -385,7 +382,7 @@ class MillisecondAwareFormatter(logging.Formatter): """ - converter = datetime.datetime.fromtimestamp + converter = datetime.datetime.fromtimestamp # type: ignore @overrides def formatTime(self, record, datefmt=None): @@ -400,10 +397,76 @@ class MillisecondAwareFormatter(logging.Formatter): return s +def log_about_logging( + logger, default_logging_level, preexisting_handlers_count, fmt, facility_name +): + level_name = logging._levelToName.get( + default_logging_level, str(default_logging_level) + ) + logger.debug(f'Initialized global logging; default logging level is {level_name}.') + if ( + config.config['logging_clear_preexisting_handlers'] + and preexisting_handlers_count > 0 + ): + msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)' + logger.warning(msg) + logger.debug(f'Logging format specification is "{fmt}"') + if config.config['logging_debug_threads']: + logger.debug( + 
'...Logging format spec captures tid/pid (--logging_debug_threads)' + ) + if config.config['logging_debug_modules']: + logger.debug( + '...Logging format spec captures files/functions/lineno (--logging_debug_modules)' + ) + if config.config['logging_syslog']: + logger.debug( + f'Logging to syslog as {facility_name} with priority mapping based on level' + ) + if config.config['logging_filename']: + logger.debug(f'Logging to filename {config.config["logging_filename"]}') + logger.debug( + f'...with {config.config["logging_filename_maxsize"]} bytes max file size.' + ) + logger.debug( + f'...and {config.config["logging_filename_count"]} rotating backup file count.' + ) + if config.config['logging_console']: + logger.debug('Logging to the console (stderr).') + if config.config['logging_info_is_print']: + logger.debug( + 'Logging logger.info messages will be repeated on stdout (--logging_info_is_print)' + ) + if config.config['logging_squelch_repeats']: + logger.debug( + 'Logging code allowed to request repeated messages be squelched (--logging_squelch_repeats)' + ) + else: + logger.debug( + 'Logging code forbidden to request messages be squelched; all messages logged (--no_logging_squelch_repeats)' + ) + if config.config['logging_probabilistically']: + logger.debug( + 'Logging code is allowed to request probabilistic logging (--logging_probabilistically)' + ) + else: + logger.debug( + 'Logging code is forbidden to request probabilistic logging; messages always logged (--no_logging_probabilistically)' + ) + if config.config['lmodule']: + logger.debug( + f'Logging dynamic per-module logging enabled (--lmodule={config.config["lmodule"]})' + ) + if config.config['logging_captures_prints']: + logger.debug( + 'Logging will capture printed data as logger.info messages (--logging_captures_prints)' + ) + + def initialize_logging(logger=None) -> logging.Logger: global logging_initialized if logging_initialized: - return + return logging.getLogger() logging_initialized = True if logger is None: @@ -417,10 +480,11 @@ def initialize_logging(logger=None) -> logging.Logger: preexisting_handlers_count += 1 if config.config['logging_config_file'] is not None: - logging.config.fileConfig('logging.conf') + fileConfig(config.config['logging_config_file']) return logger - handlers = [] + handlers: List[logging.Handler] = [] + handler: Optional[logging.Handler] = None # Global default logging level (--logging_level) default_logging_level = getattr( @@ -441,15 +505,13 @@ def initialize_logging(logger=None) -> logging.Logger: if config.config['logging_debug_modules']: fmt = f'%(filename)s:%(funcName)s:%(lineno)s|{fmt}' + facility_name = None if config.config['logging_syslog']: if sys.platform not in ('win32', 'cygwin'): if config.config['logging_syslog_facility']: - facility_name = ( - 'LOG_' + config.config['logging_syslog_facility'] - ) - facility = SysLogHandler.__dict__.get( - facility_name, SysLogHandler.LOG_USER - ) + facility_name = 'LOG_' + config.config['logging_syslog_facility'] + facility = SysLogHandler.__dict__.get(facility_name, SysLogHandler.LOG_USER) # type: ignore + assert facility handler = SysLogHandler(facility=facility, address='/dev/log') handler.setFormatter( MillisecondAwareFormatter( @@ -513,9 +575,8 @@ def initialize_logging(logger=None) -> logging.Logger: logger.propagate = False if config.config['logging_captures_prints']: - import builtins - global built_in_print + import builtins def print_and_also_log(*arg, **kwarg): f = kwarg.get('file', None) @@ -529,70 +590,13 @@ def 
initialize_logging(logger=None) -> logging.Logger: # At this point the logger is ready, handlers are set up, # etc... so log about the logging configuration. - - level_name = logging._levelToName.get( - default_logging_level, str(default_logging_level) + log_about_logging( + logger, + default_logging_level, + preexisting_handlers_count, + fmt, + facility_name, ) - logger.debug( - f'Initialized global logging; default logging level is {level_name}.' - ) - if ( - config.config['logging_clear_preexisting_handlers'] - and preexisting_handlers_count > 0 - ): - msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)' - logger.warning(msg) - logger.debug(f'Logging format specification is "{fmt}"') - if config.config['logging_debug_threads']: - logger.debug( - '...Logging format spec captures tid/pid (--logging_debug_threads)' - ) - if config.config['logging_debug_modules']: - logger.debug( - '...Logging format spec captures files/functions/lineno (--logging_debug_modules)' - ) - if config.config['logging_syslog']: - logger.debug( - f'Logging to syslog as {facility_name} with priority mapping based on level' - ) - if config.config['logging_filename']: - logger.debug(f'Logging to filename {config.config["logging_filename"]}') - logger.debug( - f'...with {config.config["logging_filename_maxsize"]} bytes max file size.' - ) - logger.debug( - f'...and {config.config["logging_filename_count"]} rotating backup file count.' - ) - if config.config['logging_console']: - logger.debug('Logging to the console (stderr).') - if config.config['logging_info_is_print']: - logger.debug( - 'Logging logger.info messages will be repeated on stdout (--logging_info_is_print)' - ) - if config.config['logging_squelch_repeats']: - logger.debug( - 'Logging code allowed to request repeated messages be squelched (--logging_squelch_repeats)' - ) - else: - logger.debug( - 'Logging code forbidden to request messages be squelched; all messages logged (--no_logging_squelch_repeats)' - ) - if config.config['logging_probabilistically']: - logger.debug( - 'Logging code is allowed to request probabilistic logging (--logging_probabilistically)' - ) - else: - logger.debug( - 'Logging code is forbidden to request probabilistic logging; messages always logged (--no_logging_probabilistically)' - ) - if config.config['lmodule']: - logger.debug( - f'Logging dynamic per-module logging enabled (--lmodule={config.config["lmodule"]})' - ) - if config.config['logging_captures_prints']: - logger.debug( - 'Logging will capture printed data as logger.info messages (--logging_captures_prints)' - ) return logger @@ -638,18 +642,20 @@ class OutputMultiplexer(object): """Bits in the destination_bitv bitvector. Used to indicate the output destination.""" - LOG_DEBUG = 0x01 # ⎫ - LOG_INFO = 0x02 # ⎪ - LOG_WARNING = 0x04 # ⎬ Must provide logger to the c'tor. - LOG_ERROR = 0x08 # ⎪ + # fmt: off + LOG_DEBUG = 0x01 # ⎫ + LOG_INFO = 0x02 # ⎪ + LOG_WARNING = 0x04 # ⎬ Must provide logger to the c'tor. + LOG_ERROR = 0x08 # ⎪ LOG_CRITICAL = 0x10 # ⎭ - FILENAMES = 0x20 # Must provide a filename to the c'tor. - FILEHANDLES = 0x40 # Must provide a handle to the c'tor. + FILENAMES = 0x20 # Must provide a filename to the c'tor. + FILEHANDLES = 0x40 # Must provide a handle to the c'tor. 
HLOG = 0x80 ALL_LOG_DESTINATIONS = ( LOG_DEBUG | LOG_INFO | LOG_WARNING | LOG_ERROR | LOG_CRITICAL ) ALL_OUTPUT_DESTINATIONS = 0x8F + # fmt: on def __init__( self, @@ -663,24 +669,20 @@ class OutputMultiplexer(object): logger = logging.getLogger(None) self.logger = logger + self.f: Optional[List[Any]] = None if filenames is not None: - self.f = [ - open(filename, 'wb', buffering=0) for filename in filenames - ] + self.f = [open(filename, 'wb', buffering=0) for filename in filenames] else: - if destination_bitv & OutputMultiplexer.FILENAMES: - raise ValueError( - "Filenames argument is required if bitv & FILENAMES" - ) + if destination_bitv & OutputMultiplexer.Destination.FILENAMES: + raise ValueError("Filenames argument is required if bitv & FILENAMES") self.f = None + self.h: Optional[List[Any]] = None if handles is not None: self.h = [handle for handle in handles] else: if destination_bitv & OutputMultiplexer.Destination.FILEHANDLES: - raise ValueError( - "Handle argument is required if bitv & FILEHANDLES" - ) + raise ValueError("Handle argument is required if bitv & FILEHANDLES") self.h = None self.set_destination_bitv(destination_bitv) @@ -690,13 +692,9 @@ class OutputMultiplexer(object): def set_destination_bitv(self, destination_bitv: int): if destination_bitv & self.Destination.FILENAMES and self.f is None: - raise ValueError( - "Filename argument is required if bitv & FILENAMES" - ) + raise ValueError("Filename argument is required if bitv & FILENAMES") if destination_bitv & self.Destination.FILEHANDLES and self.h is None: - raise ValueError( - "Handle argument is required if bitv & FILEHANDLES" - ) + raise ValueError("Handle argument is required if bitv & FILEHANDLES") self.destination_bitv = destination_bitv def print(self, *args, **kwargs): @@ -719,18 +717,12 @@ class OutputMultiplexer(object): end = "\n" if end == '\n': buf += '\n' - if ( - self.destination_bitv & self.Destination.FILENAMES - and self.f is not None - ): + if self.destination_bitv & self.Destination.FILENAMES and self.f is not None: for _ in self.f: _.write(buf.encode('utf-8')) _.flush() - if ( - self.destination_bitv & self.Destination.FILEHANDLES - and self.h is not None - ): + if self.destination_bitv & self.Destination.FILEHANDLES and self.h is not None: for _ in self.h: _.write(buf) _.flush()
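
Below is a minimal caller sketch (not part of the patch) showing how the pieces touched by this diff fit together: initialize_logging() now returns a logging.Logger even when logging was already initialized, --logging_config_file is honored via fileConfig(config.config['logging_config_file']) instead of a hardcoded 'logging.conf', and the OutputMultiplexer bit flags are referenced through the nested Destination class. The config.parse() bootstrap call and the OutputMultiplexer keyword names (logger=, handles=) are assumptions inferred from the surrounding code, not shown in this diff.

    import logging
    import sys

    import config         # companion module used throughout logging_utils
    import logging_utils


    def main() -> None:
        # Assumption: the config module exposes a parse()-style bootstrap that
        # consumes the --logging_* flags registered above; only
        # add_commandline_args() and config.config[...] appear in this diff.
        config.parse()

        # After this change initialize_logging() always returns a logger,
        # even on a repeated call (it previously returned None once
        # logging_initialized was set).
        logger = logging_utils.initialize_logging(logging.getLogger())
        logger.info('Logging is configured.')

        # Destination flags now live on the nested Destination class
        # (e.g. OutputMultiplexer.Destination.FILEHANDLES, as fixed above).
        # The keyword parameter names below are inferred from the
        # constructor body and may differ in the real signature.
        mux = logging_utils.OutputMultiplexer(
            logging_utils.OutputMultiplexer.Destination.LOG_INFO
            | logging_utils.OutputMultiplexer.Destination.FILEHANDLES,
            handles=[sys.stdout],
            logger=logger,
        )
        mux.print('This message goes to logger.info and to stdout.')


    if __name__ == '__main__':
        main()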