Started using flake8 to lint; fixed some of the warnings it flagged.
author Scott <[email protected]>
Thu, 27 Jan 2022 20:09:50 +0000 (12:09 -0800)
committer Scott <[email protected]>
Thu, 27 Jan 2022 20:09:50 +0000 (12:09 -0800)
config.py
logging_utils.py
profanity_filter.py

index 0edb16959bdd0c74bbb10c9556857fc498455485..f543b0aab2ed1c2a72760b934c813426426df3a8 100644 (file)
--- a/config.py
+++ b/config.py
@@ -153,36 +153,20 @@ def is_flag_already_in_argv(var: str):
     return False
 
 
-def parse(entry_module: Optional[str]) -> Dict[str, Any]:
-    """Main program should call this early in main().  Note that the
-    bootstrap.initialize wrapper takes care of this automatically.
+def reorder_arg_action_groups(entry_module: Optional[str]):
+    reordered_action_groups = []
+    for group in args._action_groups:
+        if entry_module is not None and entry_module in group.title:
+            reordered_action_groups.append(group)
+        elif program_name in group.title:
+            reordered_action_groups.append(group)
+        else:
+            reordered_action_groups.insert(0, group)
+    return reordered_action_groups
 
-    """
-    global config_parse_called
-    if config_parse_called:
-        return config
 
+def augment_sys_argv_from_environment_variables():
     global saved_messages
-
-    # If we're about to do the usage message dump, put the main module's
-    # argument group last in the list (if possible) so that when the user
-    # passes -h or --help, it will be visible on the screen w/o scrolling.
-    reordered_action_groups = []
-    global prog
-    for arg in sys.argv:
-        if arg == '--help' or arg == '-h':
-            for group in args._action_groups:
-                if entry_module is not None and entry_module in group.title:
-                    reordered_action_groups.append(group)
-                elif program_name in group.title:
-                    reordered_action_groups.append(group)
-                else:
-                    reordered_action_groups.insert(0, group)
-            args._action_groups = reordered_action_groups
-
-    # Examine the environment for variables that match known flags.
-    # For a flag called --example_flag the corresponding environment
-    # variable would be called EXAMPLE_FLAG.
     usage_message = args.format_usage()
     optional = False
     var = ''
@@ -218,10 +202,10 @@ def parse(entry_module: Optional[str]) -> Dict[str, Any]:
                             sys.argv.append(value)
                 var = ''
                 env = ''
-        else:
-            next
 
-    # Look for loadfile and read/parse it if present.
+
+def augment_sys_argv_from_loadfile():
+    global saved_messages
     loadfile = None
     saw_other_args = False
     grab_next_arg = False
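The helper introduced here, augment_sys_argv_from_environment_variables(), implements the convention the comments describe: a flag named --example_flag corresponds to an environment variable named EXAMPLE_FLAG. A minimal standalone sketch of that translation, using hypothetical helper names and simplified logic rather than the module's actual scan of the argparse usage message:

    import os
    import sys

    def flag_to_env_var(flag: str) -> str:
        # '--example_flag' -> 'EXAMPLE_FLAG'
        return flag.lstrip('-').upper()

    def maybe_augment_argv(flag: str) -> None:
        # If EXAMPLE_FLAG is set and --example_flag wasn't already given on
        # the commandline, append the flag and its value to sys.argv so that
        # argparse picks them up during the normal parse.
        value = os.environ.get(flag_to_env_var(flag))
        if value is not None and flag not in sys.argv:
            sys.argv.append(flag)
            sys.argv.append(value)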
@@ -238,21 +222,51 @@ def parse(entry_module: Optional[str]) -> Dict[str, Any]:
             saw_other_args = True
 
     if loadfile is not None:
-        if saw_other_args:
-            msg = f'Augmenting commandline arguments with those from {loadfile}.'
-            print(msg, file=sys.stderr)
-            saved_messages.append(msg)
         if not os.path.exists(loadfile):
-            print(
-                f'ERROR: --config_loadfile argument must be a file, {loadfile} not found.',
-                file=sys.stderr,
+            raise Exception(
+                f'ERROR: --config_loadfile argument must be a file, {loadfile} not found.'
             )
-            sys.exit(-1)
+        if saw_other_args:
+            msg = f'Augmenting commandline arguments with those from {loadfile}.'
+        else:
+            msg = f'Reading commandline arguments from {loadfile}.'
+        print(msg, file=sys.stderr)
+        saved_messages.append(msg)
+
         with open(loadfile, 'r') as rf:
             newargs = rf.readlines()
         newargs = [arg.strip('\n') for arg in newargs if 'config_savefile' not in arg]
         sys.argv += newargs
 
+
+def parse(entry_module: Optional[str]) -> Dict[str, Any]:
+    """Main program should call this early in main().  Note that the
+    bootstrap.initialize wrapper takes care of this automatically.
+
+    """
+    global config_parse_called
+    if config_parse_called:
+        return config
+    global saved_messages
+
+    # If we're about to do the usage message dump, put the main
+    # module's argument group last in the list (if possible) so that
+    # when the user passes -h or --help, it will be visible on the
+    # screen w/o scrolling.
+    for arg in sys.argv:
+        if arg == '--help' or arg == '-h':
+            args._action_groups = reorder_arg_action_groups(entry_module)
+
+    # Examine the environment for variables that match known flags.
+    # For a flag called --example_flag the corresponding environment
+    # variable would be called EXAMPLE_FLAG.  If found, hackily add
+    # these into sys.argv to be parsed.
+    augment_sys_argv_from_environment_variables()
+
+    # Look for loadfile and read/parse it if present.  This also
+    # works by jamming these values onto sys.argv.
+    augment_sys_argv_from_loadfile()
+
     # Parse (possibly augmented, possibly completely overwritten)
     # commandline args with argparse normally and populate config.
     known, unknown = args.parse_known_args()
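After this refactor, parse() is a thin driver over the helpers above: it reorders the help groups when -h/--help is present, splices flags from matching environment variables and from any --config_loadfile onto sys.argv, and then runs argparse normally to populate config. Per its docstring, a program calls it early in main() unless the bootstrap.initialize wrapper is doing so already; a hedged usage sketch, assuming the module is importable as config:

    import config

    def main() -> None:
        # Pass an entry module name instead of None to float that module's
        # argument group to the bottom of --help output.
        cfg = config.parse(None)
        # cfg is the Dict[str, Any] of parsed settings (the same dict the
        # early-return path above hands back on repeat calls).
        ...

    if __name__ == '__main__':
        main()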
index 2d9d63b78f8543890bb25fa879667bcc59294af1..0c4694e1056790af1db9dc53a5966a5392310110 100644 (file)
--- a/logging_utils.py
+++ b/logging_utils.py
@@ -396,6 +396,72 @@ class MillisecondAwareFormatter(logging.Formatter):
         return s
 
 
+def log_about_logging(
+    logger, default_logging_level, preexisting_handlers_count, fmt, facility_name
+):
+    level_name = logging._levelToName.get(
+        default_logging_level, str(default_logging_level)
+    )
+    logger.debug(f'Initialized global logging; default logging level is {level_name}.')
+    if (
+        config.config['logging_clear_preexisting_handlers']
+        and preexisting_handlers_count > 0
+    ):
+        msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)'
+        logger.warning(msg)
+    logger.debug(f'Logging format specification is "{fmt}"')
+    if config.config['logging_debug_threads']:
+        logger.debug(
+            '...Logging format spec captures tid/pid (--logging_debug_threads)'
+        )
+    if config.config['logging_debug_modules']:
+        logger.debug(
+            '...Logging format spec captures files/functions/lineno (--logging_debug_modules)'
+        )
+    if config.config['logging_syslog']:
+        logger.debug(
+            f'Logging to syslog as {facility_name} with priority mapping based on level'
+        )
+    if config.config['logging_filename']:
+        logger.debug(f'Logging to filename {config.config["logging_filename"]}')
+        logger.debug(
+            f'...with {config.config["logging_filename_maxsize"]} bytes max file size.'
+        )
+        logger.debug(
+            f'...and {config.config["logging_filename_count"]} rotating backup file count.'
+        )
+    if config.config['logging_console']:
+        logger.debug('Logging to the console (stderr).')
+    if config.config['logging_info_is_print']:
+        logger.debug(
+            'Logging logger.info messages will be repeated on stdout (--logging_info_is_print)'
+        )
+    if config.config['logging_squelch_repeats']:
+        logger.debug(
+            'Logging code allowed to request repeated messages be squelched (--logging_squelch_repeats)'
+        )
+    else:
+        logger.debug(
+            'Logging code forbidden to request messages be squelched; all messages logged (--no_logging_squelch_repeats)'
+        )
+    if config.config['logging_probabilistically']:
+        logger.debug(
+            'Logging code is allowed to request probabilistic logging (--logging_probabilistically)'
+        )
+    else:
+        logger.debug(
+            'Logging code is forbidden to request probabilistic logging; messages always logged (--no_logging_probabilistically)'
+        )
+    if config.config['lmodule']:
+        logger.debug(
+            f'Logging dynamic per-module logging enabled (--lmodule={config.config["lmodule"]})'
+        )
+    if config.config['logging_captures_prints']:
+        logger.debug(
+            'Logging will capture printed data as logger.info messages (--logging_captures_prints)'
+        )
+
+
 def initialize_logging(logger=None) -> logging.Logger:
     global logging_initialized
     if logging_initialized:
@@ -437,6 +503,7 @@ def initialize_logging(logger=None) -> logging.Logger:
     if config.config['logging_debug_modules']:
         fmt = f'%(filename)s:%(funcName)s:%(lineno)s|{fmt}'
 
+    facility_name = None
     if config.config['logging_syslog']:
         if sys.platform not in ('win32', 'cygwin'):
             if config.config['logging_syslog_facility']:
@@ -505,9 +572,8 @@ def initialize_logging(logger=None) -> logging.Logger:
     logger.propagate = False
 
     if config.config['logging_captures_prints']:
-        import builtins
-
         global built_in_print
+        import builtins
 
         def print_and_also_log(*arg, **kwarg):
             f = kwarg.get('file', None)
@@ -521,68 +587,13 @@ def initialize_logging(logger=None) -> logging.Logger:
 
     # At this point the logger is ready, handlers are set up,
     # etc... so log about the logging configuration.
-
-    level_name = logging._levelToName.get(
-        default_logging_level, str(default_logging_level)
+    log_about_logging(
+        logger,
+        default_logging_level,
+        preexisting_handlers_count,
+        fmt,
+        facility_name,
     )
-    logger.debug(f'Initialized global logging; default logging level is {level_name}.')
-    if (
-        config.config['logging_clear_preexisting_handlers']
-        and preexisting_handlers_count > 0
-    ):
-        msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)'
-        logger.warning(msg)
-    logger.debug(f'Logging format specification is "{fmt}"')
-    if config.config['logging_debug_threads']:
-        logger.debug(
-            '...Logging format spec captures tid/pid (--logging_debug_threads)'
-        )
-    if config.config['logging_debug_modules']:
-        logger.debug(
-            '...Logging format spec captures files/functions/lineno (--logging_debug_modules)'
-        )
-    if config.config['logging_syslog']:
-        logger.debug(
-            f'Logging to syslog as {facility_name} with priority mapping based on level'
-        )
-    if config.config['logging_filename']:
-        logger.debug(f'Logging to filename {config.config["logging_filename"]}')
-        logger.debug(
-            f'...with {config.config["logging_filename_maxsize"]} bytes max file size.'
-        )
-        logger.debug(
-            f'...and {config.config["logging_filename_count"]} rotating backup file count.'
-        )
-    if config.config['logging_console']:
-        logger.debug('Logging to the console (stderr).')
-    if config.config['logging_info_is_print']:
-        logger.debug(
-            'Logging logger.info messages will be repeated on stdout (--logging_info_is_print)'
-        )
-    if config.config['logging_squelch_repeats']:
-        logger.debug(
-            'Logging code allowed to request repeated messages be squelched (--logging_squelch_repeats)'
-        )
-    else:
-        logger.debug(
-            'Logging code forbidden to request messages be squelched; all messages logged (--no_logging_squelch_repeats)'
-        )
-    if config.config['logging_probabilistically']:
-        logger.debug(
-            'Logging code is allowed to request probabilistic logging (--logging_probabilistically)'
-        )
-    else:
-        logger.debug(
-            'Logging code is forbidden to request probabilistic logging; messages always logged (--no_logging_probabilistically)'
-        )
-    if config.config['lmodule']:
-        logger.debug(
-            f'Logging dynamic per-module logging enabled (--lmodule={config.config["lmodule"]})'
-        )
-    if config.config['logging_captures_prints']:
-        logger.debug(
-            'Logging will capture printed data as logger.info messages (--logging_captures_prints)'
-        )
     return logger
 
 
@@ -628,18 +639,20 @@ class OutputMultiplexer(object):
         """Bits in the destination_bitv bitvector.  Used to indicate the
         output destination."""
 
-        LOG_DEBUG = 0x01  #  ⎫
-        LOG_INFO = 0x02  #  ⎪
-        LOG_WARNING = 0x04  #  ⎬ Must provide logger to the c'tor.
-        LOG_ERROR = 0x08  #  ⎪
+        # fmt: off
+        LOG_DEBUG = 0x01     #  ⎫
+        LOG_INFO = 0x02      #  ⎪
+        LOG_WARNING = 0x04   #  ⎬ Must provide logger to the c'tor.
+        LOG_ERROR = 0x08     #  ⎪
         LOG_CRITICAL = 0x10  #  ⎭
-        FILENAMES = 0x20  # Must provide a filename to the c'tor.
-        FILEHANDLES = 0x40  # Must provide a handle to the c'tor.
+        FILENAMES = 0x20     # Must provide a filename to the c'tor.
+        FILEHANDLES = 0x40   # Must provide a handle to the c'tor.
         HLOG = 0x80
         ALL_LOG_DESTINATIONS = (
             LOG_DEBUG | LOG_INFO | LOG_WARNING | LOG_ERROR | LOG_CRITICAL
         )
         ALL_OUTPUT_DESTINATIONS = 0x8F
+        # fmt: on
 
     def __init__(
         self,
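The new # fmt: off / # fmt: on markers simply stop black from re-flowing the aligned bit-value comments. Because destination_bitv is a plain bitmask, callers combine Destination values with | and test them with &; a small sketch of that pattern, assuming the nested class is addressed as OutputMultiplexer.Destination and that its members behave as integers (the constructor's full signature isn't shown in this hunk, so only the bit arithmetic is illustrated):

    from logging_utils import OutputMultiplexer

    # Send output both to logger.info and to caller-provided file handles.
    bitv = (OutputMultiplexer.Destination.LOG_INFO
            | OutputMultiplexer.Destination.FILEHANDLES)

    # Individual destinations are then recoverable with a mask.
    wants_info = bool(bitv & OutputMultiplexer.Destination.LOG_INFO)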
index 4723a2db0679e5f866f14bbb723c66391fa06ae6..22de26395bcf188a3300ecfdeec193246d233ded 100755 (executable)
--- a/profanity_filter.py
+++ b/profanity_filter.py
@@ -499,7 +499,7 @@ class ProfanityFilter(object):
 
     def tokenize(self, text: str):
         for x in nltk.word_tokenize(text):
-            for y in re.split('\W+', x):
+            for y in re.split(r'\W+', x):
                 yield y
 
     def contains_bad_word(self, text: str) -> bool:
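The profanity_filter.py change is a pure lint cleanup: flake8 flags '\W' in a non-raw string as an invalid escape sequence (W605). '\W+' and r'\W+' currently compile to the same pattern, so behavior is unchanged; the raw string just silences the warning. A quick illustration of the split step, independent of nltk's word tokenizer:

    import re

    # Runs of non-word characters act as delimiters, so punctuation and
    # whitespace both split the token.
    print(re.split(r'\W+', "don't panic, world"))  # ['don', 't', 'panic', 'world']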