#!/usr/bin/env python3
+# © Copyright 2021-2022, Scott Gasch
+
+"""This module wraps python programs to perform some minor setup and
+teardown work on their behalf. With it, you will get:
+
+* The ability to break into pdb on unhandled exceptions,
+* automatic support for :file:`config.py` (argument parsing),
+* automatic logging support for :file:`logging.py`,
+* the ability to enable code profiling,
+* the ability to enable module import auditing,
+* optional memory profiling for your program,
+* the ability to set a random seed via the commandline,
+* automatic program timing and reporting,
+* more verbose error handling and reporting,
+
+Most of these are enabled and/or configured via commandline flags
+(see below).
+
+"""
+
import functools
import importlib
import logging
sys.__excepthook__(exc_type, exc_value, exc_tb)
return
else:
+ import io
+ import traceback
+
+ tb_output = io.StringIO()
+ traceback.print_tb(exc_tb, None, tb_output)
+ print(tb_output.getvalue(), file=sys.stderr)
+ logger.error(tb_output.getvalue())
+ tb_output.close()
+
+ # stdin or stderr is redirected, just do the normal thing
if not sys.stderr.isatty() or not sys.stdin.isatty():
- # stdin or stderr is redirected, just do the normal thing
ORIGINAL_EXCEPTION_HOOK(exc_type, exc_value, exc_tb)
- else:
- # a terminal is attached and stderr is not redirected, maybe debug.
- import traceback
- traceback.print_exception(exc_type, exc_value, exc_tb)
+ else: # a terminal is attached and stderr isn't redirected, maybe debug.
if config.config['debug_unhandled_exceptions']:
+ logger.info("Invoking the debugger...")
import pdb
- logger.info("Invoking the debugger...")
pdb.pm()
else:
ORIGINAL_EXCEPTION_HOOK(exc_type, exc_value, exc_tb)
class ImportInterceptor(importlib.abc.MetaPathFinder):
+ """An interceptor that always allows module load events but dumps a
+ record into the log and onto stdout when modules are loaded and
+    produces an audit of who imported what at the end of the run. It
+    cannot see load events that happen before it is installed, so, to
+    get a complete picture, temporarily move the bootstrap import to
+    the top of your __main__'s import list.
+
+ """
+
def __init__(self):
import collect.trie
def repopulate_modules_by_filename(self):
self.module_by_filename_cache.clear()
- for mod in sys.modules:
- if hasattr(sys.modules[mod], '__file__'):
- fname = getattr(sys.modules[mod], '__file__')
+ for _, mod in sys.modules.copy().items(): # copy here because modules is volatile
+ if hasattr(mod, '__file__'):
+ fname = getattr(mod, '__file__')
else:
fname = 'unknown'
self.module_by_filename_cache[fname] = mod
def dump_all_objects() -> None:
+ """Helper code to dump all known python objects."""
+
messages = {}
all_modules = sys.modules
for obj in object.__subclasses__():
def initialize(entry_point):
"""
Remember to initialize config, initialize logging, set/log a random
- seed, etc... before running main.
+ seed, etc... before running main. If you use this decorator around
+ your main, like this::
+
+ import bootstrap
+
+ @bootstrap.initialize
+ def main():
+ whatever
+ if __name__ == '__main__':
+ main()
+
+ You get:
+
+ * The ability to break into pdb on unhandled exceptions,
+    * automatic support for :file:`config.py` (argument parsing),
+ * automatic logging support for :file:`logging.py`,
+ * the ability to enable code profiling,
+ * the ability to enable module import auditing,
+ * optional memory profiling for your program,
+    * the ability to set a random seed via the commandline,
+ * automatic program timing and reporting,
+ * more verbose error handling and reporting,
+
+ Most of these are enabled and/or configured via commandline flags
+ (see below).
"""
@functools.wraps(entry_point)
# Try to figure out the name of the program entry point. Then
# parse configuration (based on cmdline flags, environment vars
# etc...)
- if '__globals__' in entry_point.__dict__ and '__file__' in entry_point.__globals__:
- config.parse(entry_point.__globals__['__file__'])
- else:
- config.parse(None)
+ entry_filename = None
+ entry_descr = None
+ try:
+ entry_filename = entry_point.__code__.co_filename
+ entry_descr = entry_point.__code__.__repr__()
+ except Exception:
+ if '__globals__' in entry_point.__dict__ and '__file__' in entry_point.__globals__:
+ entry_filename = entry_point.__globals__['__file__']
+ entry_descr = entry_filename
+ config.parse(entry_filename)
if config.config['trace_memory']:
import tracemalloc
# Maybe log some info about the python interpreter itself.
logger.debug(
- 'Platform: %s, maxint=0x%x, byteorder=%s',
- sys.platform, sys.maxsize, sys.byteorder
+ 'Platform: %s, maxint=0x%x, byteorder=%s', sys.platform, sys.maxsize, sys.byteorder
)
logger.debug('Python interpreter version: %s', sys.version)
logger.debug('Python implementation: %s', sys.implementation)
logger.debug('Python C API version: %s', sys.api_version)
+ if __debug__:
+ logger.debug('Python interpreter running in __debug__ mode.')
+ else:
+ logger.debug('Python interpreter running in optimized mode.')
logger.debug('Python path: %s', sys.path)
# Log something about the site_config, many things use it.
random.seed(random_seed)
# Do it, invoke the user's code. Pay attention to how long it takes.
- logger.debug('Starting %s (program entry point)', entry_point.__name__)
+ logger.debug('Starting %s (program entry point)', entry_descr)
ret = None
import stopwatch
with stopwatch.Timer() as t:
ret = entry_point(*args, **kwargs)
- logger.debug('%s (program entry point) returned %s.', entry_point.__name__, ret)
+ logger.debug('%s (program entry point) returned %s.', entry_descr, ret)
if config.config['trace_memory']:
snapshot = tracemalloc.take_snapshot()
'child system: %.4fs\n'
'machine uptime: %.4fs\n'
'walltime: %.4fs',
- utime, stime, cutime, cstime, elapsed_time, walltime
+ utime,
+ stime,
+ cutime,
+ cstime,
+ elapsed_time,
+ walltime,
)
# If it doesn't return cleanly, call attention to the return value.