return False
-def parse(entry_module: Optional[str]) -> Dict[str, Any]:
- """Main program should call this early in main(). Note that the
- bootstrap.initialize wrapper takes care of this automatically.
+def reorder_arg_action_groups(entry_module: Optional[str]):
+    """Return a reordered copy of args' argparse action groups.
+
+    Groups whose title contains entry_module (or, failing that, the
+    program name) are appended so they sort last; all other groups are
+    pushed to the front.  The caller uses this to make the entry
+    module's options appear at the bottom of --help output where they
+    are visible without scrolling.
+
+    NOTE(review): reads module globals `args` and `program_name`
+    defined elsewhere in this file.
+
+    Args:
+        entry_module: name of the main program module, or None.
+
+    Returns:
+        The reordered list of argparse action groups.
+    """
+    reordered_action_groups = []
+    for group in args._action_groups:
+        if entry_module is not None and entry_module in group.title:
+            reordered_action_groups.append(group)
+        elif program_name in group.title:
+            reordered_action_groups.append(group)
+        else:
+            # insert(0) pushes non-matching groups to the front; note
+            # this reverses their relative order — presumably harmless
+            # for help output, but TODO confirm that is intended.
+            reordered_action_groups.insert(0, group)
+    return reordered_action_groups
- """
- global config_parse_called
- if config_parse_called:
- return config
+def augment_sys_argv_from_environment_variables():
global saved_messages
-
- # If we're about to do the usage message dump, put the main module's
- # argument group last in the list (if possible) so that when the user
- # passes -h or --help, it will be visible on the screen w/o scrolling.
- reordered_action_groups = []
- global prog
- for arg in sys.argv:
- if arg == '--help' or arg == '-h':
- for group in args._action_groups:
- if entry_module is not None and entry_module in group.title:
- reordered_action_groups.append(group)
- elif program_name in group.title:
- reordered_action_groups.append(group)
- else:
- reordered_action_groups.insert(0, group)
- args._action_groups = reordered_action_groups
-
- # Examine the environment for variables that match known flags.
- # For a flag called --example_flag the corresponding environment
- # variable would be called EXAMPLE_FLAG.
usage_message = args.format_usage()
optional = False
var = ''
sys.argv.append(value)
var = ''
env = ''
- else:
- next
- # Look for loadfile and read/parse it if present.
+
+def augment_sys_argv_from_loadfile():
+ global saved_messages
loadfile = None
saw_other_args = False
grab_next_arg = False
saw_other_args = True
if loadfile is not None:
- if saw_other_args:
- msg = f'Augmenting commandline arguments with those from {loadfile}.'
- print(msg, file=sys.stderr)
- saved_messages.append(msg)
if not os.path.exists(loadfile):
- print(
- f'ERROR: --config_loadfile argument must be a file, {loadfile} not found.',
- file=sys.stderr,
+ raise Exception(
+ f'ERROR: --config_loadfile argument must be a file, {loadfile} not found.'
)
- sys.exit(-1)
+ if saw_other_args:
+ msg = f'Augmenting commandline arguments with those from {loadfile}.'
+ else:
+ msg = f'Reading commandline arguments from {loadfile}.'
+ print(msg, file=sys.stderr)
+ saved_messages.append(msg)
+
with open(loadfile, 'r') as rf:
newargs = rf.readlines()
newargs = [arg.strip('\n') for arg in newargs if 'config_savefile' not in arg]
sys.argv += newargs
+
+def parse(entry_module: Optional[str]) -> Dict[str, Any]:
+ """Main program should call this early in main(). Note that the
+ bootstrap.initialize wrapper takes care of this automatically.
+
+ """
+ global config_parse_called
+ if config_parse_called:
+ return config
+ global saved_messages
+
+ # If we're about to do the usage message dump, put the main
+ # module's argument group last in the list (if possible) so that
+ # when the user passes -h or --help, it will be visible on the
+ # screen w/o scrolling.
+ for arg in sys.argv:
+ if arg == '--help' or arg == '-h':
+ args._action_groups = reorder_arg_action_groups(entry_module)
+
+ # Examine the environment for variables that match known flags.
+ # For a flag called --example_flag the corresponding environment
+ # variable would be called EXAMPLE_FLAG. If found, hackily add
+ # these into sys.argv to be parsed.
+ augment_sys_argv_from_environment_variables()
+
+ # Look for loadfile and read/parse it if present. This also
+ # works by jamming these values onto sys.argv.
+ augment_sys_argv_from_loadfile()
+
# Parse (possibly augmented, possibly completely overwritten)
# commandline args with argparse normally and populate config.
known, unknown = args.parse_known_args()
return s
+def log_about_logging(
+    logger, default_logging_level, preexisting_handlers_count, fmt, facility_name
+):
+    """Log (at debug level) a summary of how logging was configured.
+
+    Called once by initialize_logging after handlers are set up so the
+    effective configuration is visible in the log itself.  Reads the
+    module-global `config.config` dict for each logging_* flag and
+    emits one message per enabled feature.
+
+    Args:
+        logger: the freshly initialized logger to write the summary to.
+        default_logging_level: numeric logging level chosen as default.
+        preexisting_handlers_count: number of global handlers that were
+            present (and possibly cleared) before initialization.
+        fmt: the logging format specification string in effect.
+        facility_name: syslog facility in use, or None when syslog
+            logging is disabled.
+    """
+    # Map the numeric level back to its name; fall back to the raw
+    # number for nonstandard levels.
+    level_name = logging._levelToName.get(
+        default_logging_level, str(default_logging_level)
+    )
+    logger.debug(f'Initialized global logging; default logging level is {level_name}.')
+    if (
+        config.config['logging_clear_preexisting_handlers']
+        and preexisting_handlers_count > 0
+    ):
+        msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)'
+        logger.warning(msg)
+    logger.debug(f'Logging format specification is "{fmt}"')
+    if config.config['logging_debug_threads']:
+        logger.debug(
+            '...Logging format spec captures tid/pid (--logging_debug_threads)'
+        )
+    if config.config['logging_debug_modules']:
+        logger.debug(
+            '...Logging format spec captures files/functions/lineno (--logging_debug_modules)'
+        )
+    if config.config['logging_syslog']:
+        logger.debug(
+            f'Logging to syslog as {facility_name} with priority mapping based on level'
+        )
+    if config.config['logging_filename']:
+        logger.debug(f'Logging to filename {config.config["logging_filename"]}')
+        logger.debug(
+            f'...with {config.config["logging_filename_maxsize"]} bytes max file size.'
+        )
+        logger.debug(
+            f'...and {config.config["logging_filename_count"]} rotating backup file count.'
+        )
+    if config.config['logging_console']:
+        logger.debug('Logging to the console (stderr).')
+    if config.config['logging_info_is_print']:
+        logger.debug(
+            'Logging logger.info messages will be repeated on stdout (--logging_info_is_print)'
+        )
+    # The next two flags each log on both the enabled and disabled
+    # paths so the summary is complete either way.
+    if config.config['logging_squelch_repeats']:
+        logger.debug(
+            'Logging code allowed to request repeated messages be squelched (--logging_squelch_repeats)'
+        )
+    else:
+        logger.debug(
+            'Logging code forbidden to request messages be squelched; all messages logged (--no_logging_squelch_repeats)'
+        )
+    if config.config['logging_probabilistically']:
+        logger.debug(
+            'Logging code is allowed to request probabilistic logging (--logging_probabilistically)'
+        )
+    else:
+        logger.debug(
+            'Logging code is forbidden to request probabilistic logging; messages always logged (--no_logging_probabilistically)'
+        )
+    if config.config['lmodule']:
+        logger.debug(
+            f'Logging dynamic per-module logging enabled (--lmodule={config.config["lmodule"]})'
+        )
+    if config.config['logging_captures_prints']:
+        logger.debug(
+            'Logging will capture printed data as logger.info messages (--logging_captures_prints)'
+        )
+
+
def initialize_logging(logger=None) -> logging.Logger:
global logging_initialized
if logging_initialized:
if config.config['logging_debug_modules']:
fmt = f'%(filename)s:%(funcName)s:%(lineno)s|{fmt}'
+ facility_name = None
if config.config['logging_syslog']:
if sys.platform not in ('win32', 'cygwin'):
if config.config['logging_syslog_facility']:
logger.propagate = False
if config.config['logging_captures_prints']:
- import builtins
-
global built_in_print
+ import builtins
def print_and_also_log(*arg, **kwarg):
f = kwarg.get('file', None)
# At this point the logger is ready, handlers are set up,
# etc... so log about the logging configuration.
-
- level_name = logging._levelToName.get(
- default_logging_level, str(default_logging_level)
+ log_about_logging(
+ logger,
+ default_logging_level,
+ preexisting_handlers_count,
+ fmt,
+ facility_name,
)
- logger.debug(f'Initialized global logging; default logging level is {level_name}.')
- if (
- config.config['logging_clear_preexisting_handlers']
- and preexisting_handlers_count > 0
- ):
- msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)'
- logger.warning(msg)
- logger.debug(f'Logging format specification is "{fmt}"')
- if config.config['logging_debug_threads']:
- logger.debug(
- '...Logging format spec captures tid/pid (--logging_debug_threads)'
- )
- if config.config['logging_debug_modules']:
- logger.debug(
- '...Logging format spec captures files/functions/lineno (--logging_debug_modules)'
- )
- if config.config['logging_syslog']:
- logger.debug(
- f'Logging to syslog as {facility_name} with priority mapping based on level'
- )
- if config.config['logging_filename']:
- logger.debug(f'Logging to filename {config.config["logging_filename"]}')
- logger.debug(
- f'...with {config.config["logging_filename_maxsize"]} bytes max file size.'
- )
- logger.debug(
- f'...and {config.config["logging_filename_count"]} rotating backup file count.'
- )
- if config.config['logging_console']:
- logger.debug('Logging to the console (stderr).')
- if config.config['logging_info_is_print']:
- logger.debug(
- 'Logging logger.info messages will be repeated on stdout (--logging_info_is_print)'
- )
- if config.config['logging_squelch_repeats']:
- logger.debug(
- 'Logging code allowed to request repeated messages be squelched (--logging_squelch_repeats)'
- )
- else:
- logger.debug(
- 'Logging code forbidden to request messages be squelched; all messages logged (--no_logging_squelch_repeats)'
- )
- if config.config['logging_probabilistically']:
- logger.debug(
- 'Logging code is allowed to request probabilistic logging (--logging_probabilistically)'
- )
- else:
- logger.debug(
- 'Logging code is forbidden to request probabilistic logging; messages always logged (--no_logging_probabilistically)'
- )
- if config.config['lmodule']:
- logger.debug(
- f'Logging dynamic per-module logging enabled (--lmodule={config.config["lmodule"]})'
- )
- if config.config['logging_captures_prints']:
- logger.debug(
- 'Logging will capture printed data as logger.info messages (--logging_captures_prints)'
- )
return logger
"""Bits in the destination_bitv bitvector. Used to indicate the
output destination."""
- LOG_DEBUG = 0x01 # ⎫
- LOG_INFO = 0x02 # ⎪
- LOG_WARNING = 0x04 # ⎬ Must provide logger to the c'tor.
- LOG_ERROR = 0x08 # ⎪
+ # fmt: off
+ LOG_DEBUG = 0x01 # ⎫
+ LOG_INFO = 0x02 # ⎪
+ LOG_WARNING = 0x04 # ⎬ Must provide logger to the c'tor.
+ LOG_ERROR = 0x08 # ⎪
LOG_CRITICAL = 0x10 # ⎭
- FILENAMES = 0x20 # Must provide a filename to the c'tor.
- FILEHANDLES = 0x40 # Must provide a handle to the c'tor.
+ FILENAMES = 0x20 # Must provide a filename to the c'tor.
+ FILEHANDLES = 0x40 # Must provide a handle to the c'tor.
HLOG = 0x80
ALL_LOG_DESTINATIONS = (
LOG_DEBUG | LOG_INFO | LOG_WARNING | LOG_ERROR | LOG_CRITICAL
)
ALL_OUTPUT_DESTINATIONS = 0x8F
+ # fmt: on
def __init__(
self,