projects
/
python_utils.git
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
More cleanup, yey!
[python_utils.git]
/
logging_utils.py
diff --git
a/logging_utils.py
b/logging_utils.py
index fdbb7a3d48daecb4e3b81ed4aad4bf0e11a79241..706a0543a93c4fa04b37ed87b6519405fd4505cf 100644
(file)
--- a/
logging_utils.py
+++ b/
logging_utils.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python3
#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
"""Utilities related to logging."""
"""Utilities related to logging."""
@@ -13,7 +14,7 @@
import random
import sys
from logging.config import fileConfig
from logging.handlers import RotatingFileHandler, SysLogHandler
import sys
from logging.config import fileConfig
from logging.handlers import RotatingFileHandler, SysLogHandler
-from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional
+from typing import Any, Callable, Dict, Iterable, List, Optional
import pytz
from overrides import overrides
import pytz
from overrides import overrides
@@ -174,8 +175,8 @@
cfg.add_argument(
),
)
),
)
-built_in_print = print
-logging_initialized = False
+BUILT_IN_PRINT = print
+LOGGING_INITIALIZED = False
# A map from logging_callsite_id -> count of logged messages.
# A map from logging_callsite_id -> count of logged messages.
@@ -223,8 +224,8 @@ class SquelchRepeatedMessagesFilter(logging.Filter):
class SquelchRepeatedMessagesFilter(logging.Filter):
"""
def __init__(self) -> None:
"""
def __init__(self) -> None:
- self.counters: collections.Counter = collections.Counter()
super().__init__()
super().__init__()
+ self.counters: collections.Counter = collections.Counter()
@overrides
def filter(self, record: logging.LogRecord) -> bool:
@overrides
def filter(self, record: logging.LogRecord) -> bool:
@@ -257,9 +258,7 @@ class DynamicPerScopeLoggingLevelFilter(logging.Filter):
class DynamicPerScopeLoggingLevelFilter(logging.Filter):
per_scope_logging_levels: str,
) -> None:
super().__init__()
per_scope_logging_levels: str,
) -> None:
super().__init__()
- self.valid_levels = set(
- ['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
- )
+ self.valid_levels = set(['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])
self.default_logging_level = default_logging_level
self.level_by_scope = {}
if per_scope_logging_levels is not None:
self.default_logging_level = default_logging_level
self.level_by_scope = {}
if per_scope_logging_levels is not None:
@@ -286,9 +285,9 @@ class DynamicPerScopeLoggingLevelFilter(logging.Filter):
class DynamicPerScopeLoggingLevelFilter(logging.Filter):
file=sys.stderr,
)
continue
file=sys.stderr,
)
continue
-                    self.level_by_scope[
-                        scope
-                    ] = DynamicPerScopeLoggingLevelFilter.level_name_to_level(level)
+                    self.level_by_scope[scope] = DynamicPerScopeLoggingLevelFilter.level_name_to_level(
+                        level
+                    )
@overrides
def filter(self, record: logging.LogRecord) -> bool:
@overrides
def filter(self, record: logging.LogRecord) -> bool:
@@ -386,48 +385,39 @@ class MillisecondAwareFormatter(logging.Formatter):
class MillisecondAwareFormatter(logging.Formatter):
@overrides
def formatTime(self, record, datefmt=None):
@overrides
def formatTime(self, record, datefmt=None):
- ct = MillisecondAwareFormatter.converter(
- record.created, pytz.timezone("US/Pacific")
- )
+ ct = MillisecondAwareFormatter.converter(record.created, pytz.timezone("US/Pacific"))
if datefmt:
s = ct.strftime(datefmt)
else:
t = ct.strftime("%Y-%m-%d %H:%M:%S")
if datefmt:
s = ct.strftime(datefmt)
else:
t = ct.strftime("%Y-%m-%d %H:%M:%S")
-            s = "%s,%03d" % (t, record.msecs)
+            s = f"{t},{record.msecs:%03d}"
return s
def log_about_logging(
return s
def log_about_logging(
- logger, default_logging_level, preexisting_handlers_count, fmt, facility_name
+ logger,
+ default_logging_level,
+ preexisting_handlers_count,
+ fmt,
+ facility_name,
):
):
- level_name = logging._levelToName.get(
- default_logging_level, str(default_logging_level)
- )
+ level_name = logging._levelToName.get(default_logging_level, str(default_logging_level))
logger.debug(f'Initialized global logging; default logging level is {level_name}.')
logger.debug(f'Initialized global logging; default logging level is {level_name}.')
- if (
- config.config['logging_clear_preexisting_handlers']
- and preexisting_handlers_count > 0
- ):
+ if config.config['logging_clear_preexisting_handlers'] and preexisting_handlers_count > 0:
msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)'
logger.warning(msg)
logger.debug(f'Logging format specification is "{fmt}"')
if config.config['logging_debug_threads']:
msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)'
logger.warning(msg)
logger.debug(f'Logging format specification is "{fmt}"')
if config.config['logging_debug_threads']:
- logger.debug(
- '...Logging format spec captures tid/pid (--logging_debug_threads)'
- )
+ logger.debug('...Logging format spec captures tid/pid (--logging_debug_threads)')
if config.config['logging_debug_modules']:
logger.debug(
'...Logging format spec captures files/functions/lineno (--logging_debug_modules)'
)
if config.config['logging_syslog']:
if config.config['logging_debug_modules']:
logger.debug(
'...Logging format spec captures files/functions/lineno (--logging_debug_modules)'
)
if config.config['logging_syslog']:
- logger.debug(
- f'Logging to syslog as {facility_name} with priority mapping based on level'
- )
+ logger.debug(f'Logging to syslog as {facility_name} with priority mapping based on level')
if config.config['logging_filename']:
logger.debug(f'Logging to filename {config.config["logging_filename"]}')
if config.config['logging_filename']:
logger.debug(f'Logging to filename {config.config["logging_filename"]}')
- logger.debug(
- f'...with {config.config["logging_filename_maxsize"]} bytes max file size.'
- )
+ logger.debug(f'...with {config.config["logging_filename_maxsize"]} bytes max file size.')
logger.debug(
f'...and {config.config["logging_filename_count"]} rotating backup file count.'
)
logger.debug(
f'...and {config.config["logging_filename_count"]} rotating backup file count.'
)
@@ -464,10 +454,10 @@ def log_about_logging(
def log_about_logging(
def initialize_logging(logger=None) -> logging.Logger:
def initialize_logging(logger=None) -> logging.Logger:
-    global logging_initialized
-    if logging_initialized:
+    global LOGGING_INITIALIZED
+    if LOGGING_INITIALIZED:
return logging.getLogger()
return logging.getLogger()
-    logging_initialized = True
+    LOGGING_INITIALIZED = True
if logger is None:
logger = logging.getLogger()
if logger is None:
logger = logging.getLogger()
@@ -487,11 +477,9 @@ def initialize_logging(logger=None) -> logging.Logger:
def initialize_logging(logger=None) -> logging.Logger:
handler: Optional[logging.Handler] = None
# Global default logging level (--logging_level)
handler: Optional[logging.Handler] = None
# Global default logging level (--logging_level)
- default_logging_level = getattr(
- logging, config.config['logging_level'].upper(), None
- )
+ default_logging_level = getattr(logging, config.config['logging_level'].upper(), None)
if not isinstance(default_logging_level, int):
if not isinstance(default_logging_level, int):
-        raise ValueError('Invalid level: %s' % config.config['logging_level'])
+        raise ValueError(f'Invalid level: {config.config["logging_level"]}')
if config.config['logging_format']:
fmt = config.config['logging_format']
if config.config['logging_format']:
fmt = config.config['logging_format']
@@ -511,7 +499,7 @@ def initialize_logging(logger=None) -> logging.Logger:
def initialize_logging(logger=None) -> logging.Logger:
if config.config['logging_syslog_facility']:
facility_name = 'LOG_' + config.config['logging_syslog_facility']
facility = SysLogHandler.__dict__.get(facility_name, SysLogHandler.LOG_USER) # type: ignore
if config.config['logging_syslog_facility']:
facility_name = 'LOG_' + config.config['logging_syslog_facility']
facility = SysLogHandler.__dict__.get(facility_name, SysLogHandler.LOG_USER) # type: ignore
-        assert facility
+        assert facility is not None
handler = SysLogHandler(facility=facility, address='/dev/log')
handler.setFormatter(
MillisecondAwareFormatter(
handler = SysLogHandler(facility=facility, address='/dev/log')
handler.setFormatter(
MillisecondAwareFormatter(
@@ -575,7 +563,6 @@ def initialize_logging(logger=None) -> logging.Logger:
def initialize_logging(logger=None) -> logging.Logger:
logger.propagate = False
if config.config['logging_captures_prints']:
logger.propagate = False
if config.config['logging_captures_prints']:
- global built_in_print
import builtins
def print_and_also_log(*arg, **kwarg):
import builtins
def print_and_also_log(*arg, **kwarg):
@@ -584,7 +571,7 @@ def initialize_logging(logger=None) -> logging.Logger:
def initialize_logging(logger=None) -> logging.Logger:
logger.warning(*arg)
else:
logger.info(*arg)
logger.warning(*arg)
else:
logger.info(*arg)
-            built_in_print(*arg, **kwarg)
+            BUILT_IN_PRINT(*arg, **kwarg)
builtins.print = print_and_also_log
builtins.print = print_and_also_log
@@ -679,7 +666,7 @@ class OutputMultiplexer(object):
class OutputMultiplexer(object):
self.h: Optional[List[Any]] = None
if handles is not None:
self.h: Optional[List[Any]] = None
if handles is not None:
-            self.h = [handle for handle in handles]
+            self.h = list(handles)
else:
if destination_bitv & OutputMultiplexer.Destination.FILEHANDLES:
raise ValueError("Handle argument is required if bitv & FILEHANDLES")
else:
if destination_bitv & OutputMultiplexer.Destination.FILEHANDLES:
raise ValueError("Handle argument is required if bitv & FILEHANDLES")