From e8fbbb7306430478dec55d2c963eed116d8330cc Mon Sep 17 00:00:00 2001 From: Scott Gasch Date: Tue, 8 Feb 2022 17:46:56 -0800 Subject: [PATCH] More cleanup, yey! --- function_utils.py | 2 ++ google_assistant.py | 13 ++++++++----- histogram.py | 5 ++++- id_generator.py | 5 ++++- input_utils.py | 2 +- letter_compress.py | 8 ++++---- list_utils.py | 12 +++++------- lockfile.py | 24 +++++++++++++++--------- logging_utils.py | 21 ++++++++++----------- logical_search.py | 5 +++++ math_utils.py | 6 ++++-- misc_utils.py | 2 ++ parallelize.py | 2 ++ persistent.py | 12 ++++++++---- profanity_filter.py | 17 +++++++++++------ remote_worker.py | 22 ++++++++++------------ site_config.py | 6 +++++- smart_future.py | 23 +++++++++++------------ state_tracker.py | 16 +++++++++++----- stopwatch.py | 2 ++ string_utils.py | 25 ++++++++++++------------- text_utils.py | 8 +++++--- thread_utils.py | 6 ++++-- type_utils.py | 2 ++ unittest_utils.py | 29 +++++++++++++++++++---------- unscrambler.py | 12 +++++++----- 26 files changed, 173 insertions(+), 114 deletions(-) diff --git a/function_utils.py b/function_utils.py index 3c8e4ae..f107762 100644 --- a/function_utils.py +++ b/function_utils.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""Helper methods dealing with functions.""" + from typing import Callable diff --git a/google_assistant.py b/google_assistant.py index b0aabf3..4a3a58f 100644 --- a/google_assistant.py +++ b/google_assistant.py @@ -1,7 +1,9 @@ #!/usr/bin/env python3 +"""A module to serve as a local client library around HTTP calls to +the Google Assistant via a local gateway.""" + import logging -import sys import warnings from typing import NamedTuple, Optional @@ -33,6 +35,8 @@ parser.add_argument( class GoogleResponse(NamedTuple): + """A response wrapper.""" + success: bool response: str audio_url: str @@ -57,7 +61,7 @@ def ask_google(cmd: str, *, recognize_speech=True) -> GoogleResponse: is True, perform speech recognition on the audio response from Google so as to translate it into text (best effort, YMMV). """ - logging.debug(f"Asking google: '{cmd}'") + logging.debug("Asking google: '%s'", cmd) payload = { "command": cmd, "user": config.config['google_assistant_username'], @@ -76,7 +80,7 @@ def ask_google(cmd: str, *, recognize_speech=True) -> GoogleResponse: if success: logger.debug('Google request succeeded.') if len(response) > 0: - logger.debug(f"Google said: '{response}'") + logger.debug("Google said: '%s'", response) audio = f"{config.config['google_assistant_bridge']}{j['audio']}" if recognize_speech: recognizer = sr.Recognizer() @@ -92,7 +96,7 @@ def ask_google(cmd: str, *, recognize_speech=True) -> GoogleResponse: audio_transcription = recognizer.recognize_google( speech, ) - logger.debug(f"Transcription: '{audio_transcription}'") + logger.debug("Transcription: '%s'", audio_transcription) except sr.UnknownValueError as e: logger.exception(e) msg = 'Unable to parse Google assistant\'s response.' 
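A note on the recurring pattern above: most of this commit converts eager f-string log calls (logger.debug(f"...{x}")) into lazy %-style arguments (logger.debug("...%s", x)). The payoff is that the logging module defers interpolation until a handler actually emits the record, so suppressed messages cost almost nothing. A minimal standalone sketch of the difference (names here are illustrative, not from this repo):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class Expensive:
    """Stand-in for an object whose string rendering is costly."""
    def __str__(self) -> str:
        print('expensive __str__ ran!')
        return 'rendered'

x = Expensive()

# Eager: the f-string calls __str__ immediately, even though DEBUG
# is disabled and the message will be thrown away.
logger.debug(f'value={x}')

# Lazy: logging stores the args and only formats them if the record
# is actually emitted, so __str__ never runs here.
logger.debug('value=%s', x)
```

Run it and only the first call prints "expensive __str__ ran!".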
@@ -114,4 +118,3 @@ def ask_google(cmd: str, *, recognize_speech=True) -> GoogleResponse: audio_url=audio, audio_transcription=audio_transcription, ) - sys.exit(-1) diff --git a/histogram.py b/histogram.py index c673e16..f85abea 100644 --- a/histogram.py +++ b/histogram.py @@ -1,8 +1,9 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +"""A text-based simple histogram helper class.""" + import math -from numbers import Number from typing import Dict, Generic, Iterable, List, Optional, Tuple, TypeVar T = TypeVar("T", int, float) @@ -11,6 +12,8 @@ Count = int class SimpleHistogram(Generic[T]): + """A simple histogram.""" + # Useful in defining wide open bottom/top bucket bounds: POSITIVE_INFINITY = math.inf NEGATIVE_INFINITY = -math.inf diff --git a/id_generator.py b/id_generator.py index d4c7016..dc2ac9c 100644 --- a/id_generator.py +++ b/id_generator.py @@ -1,5 +1,8 @@ #!/usr/bin/env python3 +"""A helper class for generating thread safe monotonically increasing +id numbers.""" + import itertools import logging @@ -28,7 +31,7 @@ def get(name: str, *, start=0) -> int: if name not in generators: generators[name] = itertools.count(start, 1) x = next(generators[name]) - logger.debug(f"Generated next id {x}") + logger.debug("Generated next id %d", x) return x diff --git a/input_utils.py b/input_utils.py index a166d7a..7d5e180 100644 --- a/input_utils.py +++ b/input_utils.py @@ -37,7 +37,7 @@ def single_keystroke_response( try: while True: response = readchar.readchar() - logger.debug(f'Keystroke: {ord(response)}') + logger.debug('Keystroke: 0x%x', ord(response)) if response in valid_responses: break if ord(response) in os_special_keystrokes: diff --git a/letter_compress.py b/letter_compress.py index 9b4cf19..42f06da 100644 --- a/letter_compress.py +++ b/letter_compress.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""A simple compression helper for lowercase ascii text.""" + import bitstring from collect.bidict import BiDict @@ -32,14 +34,12 @@ def compress(uncompressed: str) -> bytes: """ compressed = bitstring.BitArray() - for (n, letter) in enumerate(uncompressed): + for letter in uncompressed: if 'a' <= letter <= 'z': bits = ord(letter) - ord('a') + 1 # 1..26 else: if letter not in special_characters: - raise Exception( - f'"{uncompressed}" contains uncompressable char="{letter}"' - ) + raise Exception(f'"{uncompressed}" contains uncompressable char="{letter}"') bits = special_characters[letter] compressed.append(f"uint:5={bits}") while len(compressed) % 8 != 0: diff --git a/list_utils.py b/list_utils.py index d70159a..91af8f9 100644 --- a/list_utils.py +++ b/list_utils.py @@ -1,8 +1,10 @@ #!/usr/bin/env python3 +"""Some useful(?) utilities for dealing with Lists.""" + from collections import Counter from itertools import islice -from typing import Any, Iterator, List, Mapping, Sequence, Tuple +from typing import Any, Iterator, List, Sequence, Tuple def shard(lst: List[Any], size: int) -> Iterator[Any]: @@ -230,9 +232,7 @@ def _permute(seq: str, path: str): yield from _permute(cdr, path + car) -def binary_search( - lst: Sequence[Any], target: Any, *, sanity_check=False -) -> Tuple[bool, int]: +def binary_search(lst: Sequence[Any], target: Any, *, sanity_check=False) -> Tuple[bool, int]: """Performs a binary search on lst (which must already be sorted). 
Returns a Tuple composed of a bool which indicates whether the target was found and an int which indicates the index closest to @@ -267,9 +267,7 @@ def binary_search( return _binary_search(lst, target, 0, len(lst) - 1) -def _binary_search( - lst: Sequence[Any], target: Any, low: int, high: int -) -> Tuple[bool, int]: +def _binary_search(lst: Sequence[Any], target: Any, low: int, high: int) -> Tuple[bool, int]: if high >= low: mid = (high + low) // 2 if lst[mid] == target: diff --git a/lockfile.py b/lockfile.py index 2907107..6993cb8 100644 --- a/lockfile.py +++ b/lockfile.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""File-based locking helper.""" + import datetime import json import logging @@ -26,11 +28,15 @@ logger = logging.getLogger(__name__) class LockFileException(Exception): + """An exception related to lock files.""" + pass @dataclass class LockFileContents: + """The contents we'll write to each lock file.""" + pid: int commandline: str expiration_timestamp: Optional[float] @@ -56,9 +62,10 @@ class LockFile(object): expiration_timestamp: Optional[float] = None, override_command: Optional[str] = None, ) -> None: - self.is_locked = False - self.lockfile = lockfile_path - self.override_command = override_command + self.is_locked: bool = False + self.lockfile: str = lockfile_path + self.locktime: Optional[float] = None + self.override_command: Optional[str] = override_command if do_signal_cleanup: signal.signal(signal.SIGINT, self._signal) signal.signal(signal.SIGTERM, self._signal) @@ -71,7 +78,7 @@ class LockFile(object): return not os.path.exists(self.lockfile) def try_acquire_lock_once(self) -> bool: - logger.debug(f"Trying to acquire {self.lockfile}.") + logger.debug("Trying to acquire %s.", self.lockfile) try: # Attempt to create the lockfile. These flags cause # os.open to raise an OSError if the file already @@ -81,13 +88,12 @@ class LockFile(object): contents = self._get_lockfile_contents() logger.debug(contents) f.write(contents) - logger.debug(f'Success; I own {self.lockfile}.') + logger.debug('Success; I own %s.', self.lockfile) self.is_locked = True return True except OSError: pass - msg = f'Could not acquire {self.lockfile}.' - logger.warning(msg) + logger.warning('Couldn\'t acquire %s.', self.lockfile) return False def acquire_with_retries( @@ -125,7 +131,7 @@ class LockFile(object): logger.warning(msg) raise LockFileException(msg) - def __exit__(self, type, value, traceback): + def __exit__(self, _, value, traceback): if self.locktime: ts = datetime.datetime.now().timestamp() duration = ts - self.locktime @@ -164,7 +170,7 @@ class LockFile(object): line = lines[0] line_dict = json.loads(line) contents = LockFileContents(**line_dict) - logger.debug(f'Blocking lock contents="{contents}"') + logger.debug('Blocking lock contents="%s"', contents) # Does the PID exist still? try: diff --git a/logging_utils.py b/logging_utils.py index ca15441..706a054 100644 --- a/logging_utils.py +++ b/logging_utils.py @@ -14,7 +14,7 @@ import random import sys from logging.config import fileConfig from logging.handlers import RotatingFileHandler, SysLogHandler -from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional +from typing import Any, Callable, Dict, Iterable, List, Optional import pytz from overrides import overrides @@ -175,8 +175,8 @@ cfg.add_argument( ), ) -built_in_print = print -logging_initialized = False +BUILT_IN_PRINT = print +LOGGING_INITIALIZED = False # A map from logging_callsite_id -> count of logged messages.
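For reviewers unfamiliar with the trick LockFile.try_acquire_lock_once depends on: os.open with O_CREAT | O_EXCL is an atomic test-and-create, so exactly one process can win the race to own the file, which is what lets a plain file act as a mutex. A stripped-down sketch of the same technique (a toy, not the module's API):

```python
import os

def try_lock(path: str) -> bool:
    """Return True if we atomically created path and thus own the lock."""
    try:
        # O_EXCL makes the kernel fail with EEXIST if path already
        # exists; existence-check and creation happen as one syscall.
        fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    except OSError:
        return False  # somebody else holds the lock
    with os.fdopen(fd, 'w') as f:
        f.write(str(os.getpid()))  # breadcrumb, like LockFileContents above
    return True

if __name__ == '__main__':
    assert try_lock('/tmp/demo.lock')      # first caller wins...
    assert not try_lock('/tmp/demo.lock')  # ...second caller loses
    os.remove('/tmp/demo.lock')            # release
```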
@@ -390,7 +390,7 @@ class MillisecondAwareFormatter(logging.Formatter): s = ct.strftime(datefmt) else: t = ct.strftime("%Y-%m-%d %H:%M:%S") - s = "%s,%03d" % (t, record.msecs) + s = f"{t},{int(record.msecs):03d}" return s @@ -454,10 +454,10 @@ def log_about_logging( def initialize_logging(logger=None) -> logging.Logger: - global logging_initialized - if logging_initialized: + global LOGGING_INITIALIZED + if LOGGING_INITIALIZED: return logging.getLogger() - logging_initialized = True + LOGGING_INITIALIZED = True if logger is None: logger = logging.getLogger() @@ -479,7 +479,7 @@ def initialize_logging(logger=None) -> logging.Logger: # Global default logging level (--logging_level) default_logging_level = getattr(logging, config.config['logging_level'].upper(), None) if not isinstance(default_logging_level, int): - raise ValueError('Invalid level: %s' % config.config['logging_level']) + raise ValueError(f'Invalid level: {config.config["logging_level"]}') if config.config['logging_format']: fmt = config.config['logging_format'] @@ -563,7 +563,6 @@ def initialize_logging(logger=None) -> logging.Logger: logger.propagate = False if config.config['logging_captures_prints']: - global built_in_print import builtins def print_and_also_log(*arg, **kwarg): @@ -572,7 +571,7 @@ def initialize_logging(logger=None) -> logging.Logger: logger.warning(*arg) else: logger.info(*arg) - built_in_print(*arg, **kwarg) + BUILT_IN_PRINT(*arg, **kwarg) builtins.print = print_and_also_log @@ -667,7 +666,7 @@ class OutputMultiplexer(object): self.h: Optional[List[Any]] = None if handles is not None: - self.h = [handle for handle in handles] + self.h = list(handles) else: if destination_bitv & OutputMultiplexer.Destination.FILEHANDLES: raise ValueError("Handle argument is required if bitv & FILEHANDLES") diff --git a/logical_search.py b/logical_search.py index 76c2f86..b55e689 100644 --- a/logical_search.py +++ b/logical_search.py @@ -1,5 +1,9 @@ #!/usr/bin/env python3 +"""A module concerned with the creation and searching of a +corpus of documents. The corpus is held in memory for fast +searching.""" + from __future__ import annotations import enum @@ -12,6 +16,7 @@ class ParseError(Exception): """An error encountered while parsing a logical search expression.""" def __init__(self, message: str): + super().__init__() self.message = message diff --git a/math_utils.py b/math_utils.py index 3953ae5..37fcec5 100644 --- a/math_utils.py +++ b/math_utils.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""Mathematical helpers.""" + import functools import math from heapq import heappop, heappush @@ -76,8 +78,8 @@ def truncate_float(n: float, decimals: int = 2): 3.141 """ - assert decimals > 0 and decimals < 10 - multiplier = 10 ** decimals + assert 0 < decimals < 10 + multiplier = 10**decimals return int(n * multiplier) / multiplier diff --git a/misc_utils.py b/misc_utils.py index 4979a3c..a73728a 100644 --- a/misc_utils.py +++ b/misc_utils.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""Miscellaneous utilities.""" + import os diff --git a/parallelize.py b/parallelize.py index f2cfcbb..b2a1ced 100644 --- a/parallelize.py +++ b/parallelize.py @@ -10,6 +10,8 @@ from enum import Enum class Method(Enum): + """How should we parallelize: by threads, processes, or remote workers?""" + THREAD = 1 PROCESS = 2 REMOTE = 3 diff --git a/persistent.py b/persistent.py index 119931b..c902313 100644 --- a/persistent.py +++ b/persistent.py @@ -1,5 +1,9 @@ #!/usr/bin/env python3 +"""A Persistent is just a class with a load and save method.
This +module defines the Persistent base and a decorator that can be used to +create a persistent singleton that autoloads and autosaves.""" + import atexit import datetime import enum @@ -136,21 +140,21 @@ class persistent_autoloaded_singleton(object): # memory. if self.instance is not None: logger.debug( - f'Returning already instantiated singleton instance of {cls.__name__}.' + 'Returning already instantiated singleton instance of %s.', cls.__name__ ) return self.instance # Otherwise, try to load it from persisted state. was_loaded = False - logger.debug(f'Attempting to load {cls.__name__} from persisted state.') + logger.debug('Attempting to load %s from persisted state.', cls.__name__) self.instance = cls.load() if not self.instance: msg = 'Loading from cache failed.' logger.warning(msg) - logger.debug(f'Attempting to instantiate {cls.__name__} directly.') + logger.debug('Attempting to instantiate %s directly.', cls.__name__) self.instance = cls(*args, **kwargs) else: - logger.debug(f'Class {cls.__name__} was loaded from persisted state successfully.') + logger.debug('Class %s was loaded from persisted state successfully.', cls.__name__) was_loaded = True assert self.instance is not None diff --git a/profanity_filter.py b/profanity_filter.py index e5c9e11..37756ba 100755 --- a/profanity_filter.py +++ b/profanity_filter.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""A helper to identify and optionally obscure some bad words.""" + import logging import random import re @@ -17,6 +19,8 @@ logger = logging.getLogger(__name__) @decorator_utils.singleton class ProfanityFilter(object): + """A helper to identify and optionally obscure some bad words.""" + def __init__(self): self.bad_words = set( [ @@ -499,7 +503,8 @@ class ProfanityFilter(object): chunks = [self.stemmer.stem(word) for word in nltk.word_tokenize(result)] return ' '.join(chunks) - def tokenize(self, text: str): + @staticmethod + def tokenize(text: str): for x in nltk.word_tokenize(text): for y in re.split(r'\W+', x): yield y @@ -518,24 +523,24 @@ class ProfanityFilter(object): False """ - words = [word for word in self.tokenize(text)] + words = list(self.tokenize(text)) for word in words: if self.is_bad_word(word): - logger.debug(f'"{word}" is profanity') + logger.debug('"%s" is profanity', word) return True if len(words) > 1: for bigram in string_utils.ngrams_presplit(words, 2): bigram = ' '.join(bigram) if self.is_bad_word(bigram): - logger.debug(f'"{bigram}" is profanity') + logger.debug('"%s" is profanity', bigram) return True if len(words) > 2: for trigram in string_utils.ngrams_presplit(words, 3): trigram = ' '.join(trigram) if self.is_bad_word(trigram): - logger.debug(f'"{trigram}" is profanity') + logger.debug('"%s" is profanity', trigram) return True return False @@ -563,7 +568,7 @@ class ProfanityFilter(object): break return out - words = [x for x in self.tokenize(text)] + words = list(self.tokenize(text)) words.append('') words.append('') words.append('') diff --git a/remote_worker.py b/remote_worker.py index 82b80ea..75dfe8e 100755 --- a/remote_worker.py +++ b/remote_worker.py @@ -59,14 +59,12 @@ def watch_for_cancel(terminate_event: threading.Event) -> None: for ancestor in ancestors: name = ancestor.name() pid = ancestor.pid - logger.debug(f'Ancestor process {name} (pid={pid})') + logger.debug('Ancestor process %s (pid=%d)', name, pid) if 'ssh' in name.lower(): saw_sshd = True break if not saw_sshd: - logger.error( - 'Did not see sshd in our ancestors list?! Committing suicide.' 
- ) + logger.error('Did not see sshd in our ancestors list?! Committing suicide.') os.system('pstree') os.kill(os.getpid(), signal.SIGTERM) time.sleep(5.0) @@ -99,43 +97,43 @@ def main() -> None: if config.config['watch_for_cancel']: (thread, stop_thread) = watch_for_cancel() - logger.debug(f'Reading {in_file}.') + logger.debug('Reading %s.', in_file) try: with open(in_file, 'rb') as rb: serialized = rb.read() except Exception as e: logger.exception(e) - logger.critical(f'Problem reading {in_file}. Aborting.') + logger.critical('Problem reading %s. Aborting.', in_file) cleanup_and_exit(thread, stop_thread, 1) - logger.debug(f'Deserializing {in_file}.') + logger.debug('Deserializing %s', in_file) try: fun, args, kwargs = cloudpickle.loads(serialized) except Exception as e: logger.exception(e) - logger.critical(f'Problem deserializing {in_file}. Aborting.') + logger.critical('Problem deserializing %s. Aborting.', in_file) cleanup_and_exit(thread, stop_thread, 2) logger.debug('Invoking user code...') with Timer() as t: ret = fun(*args, **kwargs) - logger.debug(f'User code took {t():.1f}s') + logger.debug('User code took %.1fs', t()) logger.debug('Serializing results') try: serialized = cloudpickle.dumps(ret) except Exception as e: logger.exception(e) - logger.critical(f'Could not serialize result ({type(ret)}). Aborting.') + logger.critical('Could not serialize result (%s). Aborting.', type(ret)) cleanup_and_exit(thread, stop_thread, 3) - logger.debug(f'Writing {out_file}.') + logger.debug('Writing %s', out_file) try: with open(out_file, 'wb') as wb: wb.write(serialized) except Exception as e: logger.exception(e) - logger.critical(f'Error writing {out_file}. Aborting.') + logger.critical('Error writing %s. Aborting.', out_file) cleanup_and_exit(thread, stop_thread, 4) cleanup_and_exit(thread, stop_thread, 0) diff --git a/site_config.py b/site_config.py index fcf22a8..7f6410d 100644 --- a/site_config.py +++ b/site_config.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""Location/site dependent data.""" + import logging import platform from dataclasses import dataclass @@ -27,6 +29,8 @@ args.add_argument( @dataclass class SiteConfig(object): + """The set of information specific to where the program is running.""" + location_name: str location: Location network: str @@ -129,7 +133,7 @@ def effective_location(location_override: Optional[str] = None) -> str: if location_override is None or location_override == 'NONE': location = this_location() else: - logger.debug(f'site_config\'s location_override was set to: {location_override}') + logger.debug('site_config\'s location_override was set to: %s', location_override) location = location_override return location diff --git a/smart_future.py b/smart_future.py index 460dcb9..9aa68f3 100644 --- a/smart_future.py +++ b/smart_future.py @@ -1,11 +1,15 @@ #!/usr/bin/env python3 +"""A future that can be treated like the result that it contains and +will not block until it is used. 
At that point, if the underlying +value is not yet available, it will block until it becomes +available.""" + from __future__ import annotations import concurrent import concurrent.futures as fut import logging -import traceback from typing import Callable, List, Set, TypeVar from overrides import overrides @@ -31,7 +35,7 @@ def wait_any( smart_future_by_real_future = {} completed_futures: Set[fut.Future] = set() for x in futures: - assert type(x) == SmartFuture + assert isinstance(x, SmartFuture) real_futures.append(x.wrapped_future) smart_future_by_real_future[x.wrapped_future] = x @@ -44,15 +48,12 @@ def wait_any( if log_exceptions and not f.cancelled(): exception = f.exception() if exception is not None: - logger.warning( - f'Future {id(f)} raised an unhandled exception and exited.' - ) + logger.warning('Future 0x%x raised an unhandled exception and exited.', id(f)) logger.exception(exception) raise exception yield smart_future_by_real_future[f] if callback is not None: callback() - return def wait_all( @@ -62,7 +63,7 @@ def wait_all( ) -> None: real_futures = [] for x in futures: - assert type(x) == SmartFuture + assert isinstance(x, SmartFuture) real_futures.append(x.wrapped_future) (done, not_done) = concurrent.futures.wait( @@ -73,9 +74,7 @@ def wait_all( if not f.cancelled(): exception = f.exception() if exception is not None: - logger.warning( - f'Future {id(f)} raised an unhandled exception and exited.' - ) + logger.warning('Future 0x%x raised an unhandled exception and exited.', id(f)) logger.exception(exception) raise exception assert len(done) == len(real_futures) @@ -91,7 +90,7 @@ class SmartFuture(DeferredOperand): """ def __init__(self, wrapped_future: fut.Future) -> None: - assert type(wrapped_future) == fut.Future + assert isinstance(wrapped_future, fut.Future) self.wrapped_future = wrapped_future self.id = id_generator.get("smart_future_id") @@ -104,5 +103,5 @@ class SmartFuture(DeferredOperand): # You shouldn't have to call this; instead, have a look at defining a # method on DeferredOperand base class. @overrides - def _resolve(self, *, timeout=None) -> T: + def _resolve(self, timeout=None) -> T: return self.wrapped_future.result(timeout) diff --git a/state_tracker.py b/state_tracker.py index e592315..b375f89 100644 --- a/state_tracker.py +++ b/state_tracker.py @@ -1,5 +1,10 @@ #!/usr/bin/env python3 +"""Several helpers to keep track of internal state via periodic +polling. StateTracker expects to be invoked periodically to maintain +state whereas the others automatically update themselves and, +optionally, expose an event for client code to wait on state changes.""" + import datetime import logging import threading @@ -35,6 +40,7 @@ class StateTracker(ABC): """ self.update_ids_to_update_secs = update_ids_to_update_secs self.last_reminder_ts: Dict[str, Optional[datetime.datetime]] = {} + self.now: Optional[datetime.datetime] = None for x in update_ids_to_update_secs.keys(): self.last_reminder_ts[x] = None @@ -80,13 +86,13 @@ class StateTracker(ABC): refresh_secs = self.update_ids_to_update_secs[update_id] last_run = self.last_reminder_ts[update_id] if last_run is None: # Never run before - logger.debug(f'id {update_id} has never been run; running it now') + logger.debug('id %s has never been run; running it now', update_id) self.update(update_id, self.now, self.last_reminder_ts[update_id]) self.last_reminder_ts[update_id] = self.now else: delta = self.now - last_run if delta.total_seconds() >= refresh_secs: # Is overdue? 
- logger.debug(f'id {update_id} is overdue; running it now') + logger.debug('id %s is overdue; running it now', update_id) self.update( update_id, self.now, @@ -114,7 +120,7 @@ class AutomaticStateTracker(StateTracker): logger.debug('pace_maker noticed event; shutting down') return self.heartbeat() - logger.debug(f'pace_maker is sleeping for {self.sleep_delay}s') + logger.debug('pace_maker is sleeping for %.1fs', self.sleep_delay) time.sleep(self.sleep_delay) def __init__( @@ -127,12 +133,12 @@ class AutomaticStateTracker(StateTracker): super().__init__(update_ids_to_update_secs) if override_sleep_delay is not None: - logger.debug(f'Overriding sleep delay to {override_sleep_delay}') + logger.debug('Overriding sleep delay to %.1f', override_sleep_delay) self.sleep_delay = override_sleep_delay else: periods_list = list(update_ids_to_update_secs.values()) self.sleep_delay = math_utils.gcd_float_sequence(periods_list) - logger.info(f'Computed sleep_delay={self.sleep_delay}') + logger.info('Computed sleep_delay=%.1f', self.sleep_delay) (thread, stop_event) = self.pace_maker() self.should_terminate = stop_event self.updater_thread = thread diff --git a/stopwatch.py b/stopwatch.py index c6c154c..fa4f2b5 100644 --- a/stopwatch.py +++ b/stopwatch.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""A simple stopwatch decorator / context for timing things.""" + import time from typing import Callable, Optional diff --git a/string_utils.py b/string_utils.py index 3c97ff7..d75c6ba 100644 --- a/string_utils.py +++ b/string_utils.py @@ -1091,7 +1091,6 @@ def valid_date(in_str: str) -> bool: """ True if the string represents a valid date. """ - import dateparse import dateparse.dateparse_utils as dp try: @@ -1113,7 +1112,7 @@ def to_datetime(in_str: str) -> Optional[datetime.datetime]: try: d = dp.DateParser() # type: ignore dt = d.parse(in_str) - if type(dt) == datetime.datetime: + if isinstance(dt, datetime.datetime): return dt except ValueError: msg = f'Unable to parse datetime {in_str}.' @@ -1372,7 +1371,7 @@ def make_contractions(txt: str) -> str: for second in second_list: # Disallow there're/where're. They're valid English # but sound weird. - if (first == 'there' or first == 'where') and second == 'a(re)': + if (first in ('there', 'where')) and second == 'a(re)': continue pattern = fr'\b({first})\s+{second}\b' @@ -1458,11 +1457,11 @@ def shuffle_columns_into_list( # Column specs map input lines' columns into outputs. # [col1, col2...] for spec in column_specs: - chunk = '' + hunk = '' for n in spec: - chunk = chunk + delim + input_lines[n] - chunk = chunk.strip(delim) - out.append(chunk) + hunk = hunk + delim + input_lines[n] + hunk = hunk.strip(delim) + out.append(hunk) return out @@ -1488,11 +1487,11 @@ def shuffle_columns_into_dict( # Column specs map input lines' columns into outputs. # "key", [col1, col2...] 
for spec in column_specs: - chunk = '' + hunk = '' for n in spec[1]: - chunk = chunk + delim + input_lines[n] - chunk = chunk.strip(delim) - out[spec[0]] = chunk + hunk = hunk + delim + input_lines[n] + hunk = hunk.strip(delim) + out[spec[0]] = hunk return out @@ -1517,9 +1516,9 @@ def to_ascii(x: str): b'1, 2, 3' """ - if type(x) is str: + if isinstance(x, str): return x.encode('ascii') - if type(x) is bytes: + if isinstance(x, bytes): return x raise Exception('to_ascii works with strings and bytes') diff --git a/text_utils.py b/text_utils.py index 76cc7e8..4384a1e 100644 --- a/text_utils.py +++ b/text_utils.py @@ -15,6 +15,8 @@ logger = logging.getLogger(__file__) class RowsColumns(NamedTuple): + """Row + Column""" + rows: int columns: int @@ -31,7 +33,7 @@ def get_console_rows_columns() -> RowsColumns: ).split() except Exception as e: logger.exception(e) - raise Exception('Can\'t determine console size?!') + raise Exception('Can\'t determine console size?!') from e return RowsColumns(int(rows), int(columns)) @@ -122,10 +124,10 @@ def sparkline(numbers: List[float]) -> Tuple[float, float, str]: barcount = len(_bar) min_num, max_num = min(numbers), max(numbers) span = max_num - min_num - sparkline = ''.join( + sline = ''.join( _bar[min([barcount - 1, int((n - min_num) / span * barcount)])] for n in numbers ) - return min_num, max_num, sparkline + return min_num, max_num, sline def distribute_strings( diff --git a/thread_utils.py b/thread_utils.py index 2216127..4db4cf6 100644 --- a/thread_utils.py +++ b/thread_utils.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""Utilities for dealing with threads + threading.""" + import functools import logging import os @@ -104,7 +106,7 @@ def background_thread( kwargs=kwa, ) thread.start() - logger.debug(f'Started thread {thread.name} tid={thread.ident}') + logger.debug('Started thread "%s" tid=%d', thread.name, thread.ident) return (thread, should_terminate) return inner_wrapper @@ -163,7 +165,7 @@ def periodically_invoke( newargs = (should_terminate, *args) thread = threading.Thread(target=helper_thread, args=newargs, kwargs=kwargs) thread.start() - logger.debug(f'Started thread {thread.name} tid={thread.ident}') + logger.debug('Started thread "%s" tid=%d', thread.name, thread.ident) return (thread, should_terminate) return wrapper_repeat diff --git a/type_utils.py b/type_utils.py index 1584597..c2f432a 100644 --- a/type_utils.py +++ b/type_utils.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""Utility functions for dealing with typing.""" + import logging from typing import Any, Optional diff --git a/unittest_utils.py b/unittest_utils.py index f229df7..70e588e 100644 --- a/unittest_utils.py +++ b/unittest_utils.py @@ -76,6 +76,9 @@ unittest.main = bootstrap.initialize(unittest.main) class PerfRegressionDataPersister(ABC): + """A base class for a signature dealing with persisting perf + regression data.""" + def __init__(self): pass @@ -93,7 +96,10 @@ class PerfRegressionDataPersister(ABC): class FileBasedPerfRegressionDataPersister(PerfRegressionDataPersister): + """A perf regression data persister that uses files.""" + def __init__(self, filename: str): + super().__init__() self.filename = filename self.traces_to_delete: List[str] = [] @@ -114,7 +120,10 @@ class FileBasedPerfRegressionDataPersister(PerfRegressionDataPersister): class DatabasePerfRegressionDataPersister(PerfRegressionDataPersister): + """A perf regression data persister that uses a database backend.""" + def __init__(self, dbspec: str): + super().__init__() self.dbspec = dbspec 
self.engine = sa.create_engine(self.dbspec) self.conn = self.engine.connect() @@ -131,10 +140,10 @@ class DatabasePerfRegressionDataPersister(PerfRegressionDataPersister): def save_performance_data(self, method_id: str, data: Dict[str, List[float]]): self.delete_performance_data(method_id) - for (method_id, perf_data) in data.items(): + for (mid, perf_data) in data.items(): sql = 'INSERT INTO runtimes_by_function (function, runtime) VALUES ' for perf in perf_data: - self.conn.execute(sql + f'("{method_id}", {perf});') + self.conn.execute(sql + f'("{mid}", {perf});') def delete_performance_data(self, method_id: str): sql = f'DELETE FROM runtimes_by_function WHERE function = "{method_id}"' @@ -168,8 +177,8 @@ def check_method_for_perf_regressions(func: Callable) -> Callable: func_id = function_utils.function_identifier(func) func_name = func.__name__ - logger.debug(f'Watching {func_name}\'s performance...') - logger.debug(f'Canonical function identifier = {func_id}') + logger.debug('Watching %s\'s performance...', func_name) + logger.debug('Canonical function identifier = "%s"', func_id) try: perfdb = helper.load_performance_data(func_id) @@ -195,15 +204,15 @@ def check_method_for_perf_regressions(func: Callable) -> Callable: hist = perfdb.get(func_id, []) if len(hist) < config.config['unittests_num_perf_samples']: hist.append(run_time) - logger.debug(f'Still establishing a perf baseline for {func_name}') + logger.debug('Still establishing a perf baseline for %s', func_name) else: stdev = statistics.stdev(hist) - logger.debug(f'For {func_name}, performance stdev={stdev}') + logger.debug('For %s, performance stdev=%.2f', func_name, stdev) slowest = hist[-1] - logger.debug(f'For {func_name}, slowest perf on record is {slowest:f}s') + logger.debug('For %s, slowest perf on record is %.2fs', func_name, slowest) limit = slowest + stdev * 4 - logger.debug(f'For {func_name}, max acceptable runtime is {limit:f}s') - logger.debug(f'For {func_name}, actual observed runtime was {run_time:f}s') + logger.debug('For %s, max acceptable runtime is %.2fs', func_name, limit) + logger.debug('For %s, actual observed runtime was %.2fs', func_name, run_time) if run_time > limit: msg = f'''{func_id} performance has regressed unacceptably. {slowest:f}s is the slowest runtime on record in {len(hist)} perf samples. 
@@ -250,7 +259,7 @@ def check_all_methods_for_perf_regressions(prefix='test_'): for name, m in inspect.getmembers(cls, inspect.isfunction): if name.startswith(prefix): setattr(cls, name, check_method_for_perf_regressions(m)) - logger.debug(f'Wrapping {cls.__name__}:{name}.') + logger.debug('Wrapping %s:%s.', cls.__name__, name) return cls return decorate_the_testcase diff --git a/unscrambler.py b/unscrambler.py index c5bc9b5..1b24230 100644 --- a/unscrambler.py +++ b/unscrambler.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""A fast word unscrambler library.""" + import logging from typing import Dict, Mapping, Optional @@ -21,10 +23,10 @@ cfg.add_argument( logger = logging.getLogger(__name__) letters_bits = 32 -letters_mask = 2 ** letters_bits - 1 +letters_mask = 2**letters_bits - 1 fprint_bits = 52 -fprint_mask = (2 ** fprint_bits - 1) << letters_bits +fprint_mask = (2**fprint_bits - 1) << letters_bits fprint_feature_bit = { 'e': 0, @@ -106,7 +108,7 @@ class Unscrambler(object): self.sigs = [] self.words = [] - filename = self.get_indexfile(indexfile) + filename = Unscrambler.get_indexfile(indexfile) with open(filename, 'r') as rf: lines = rf.readlines() for line in lines: @@ -116,7 +118,8 @@ class Unscrambler(object): self.sigs.append(isig) self.words.append(word) - def get_indexfile(self, indexfile: Optional[str]) -> str: + @staticmethod + def get_indexfile(indexfile: Optional[str]) -> str: if indexfile is None: if 'unscrambler_default_indexfile' in config.config: indexfile = config.config['unscramble_indexfile'] @@ -193,7 +196,6 @@ class Unscrambler(object): @staticmethod def repopulate( - lsigs: Dict[str, int], dictfile: str = '/usr/share/dict/words', indexfile: str = '/usr/share/dict/sparse_index', ) -> None: -- 2.47.1
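One closing observation on unscrambler.py's constants (letters_bits = 32, fprint_bits = 52): judging from the masks, each word's signature appears to be a single integer with a 52-bit fingerprint packed above a 32-bit letters field. A hedged sketch of just that pack/unpack arithmetic, my illustration rather than the library's actual signature computation:

```python
LETTERS_BITS = 32
LETTERS_MASK = 2**LETTERS_BITS - 1                   # low 32 bits
FPRINT_BITS = 52
FPRINT_MASK = (2**FPRINT_BITS - 1) << LETTERS_BITS   # next 52 bits above them

def pack(fprint: int, letters: int) -> int:
    """Combine the two fields into one comparable, sortable int."""
    assert 0 <= letters <= LETTERS_MASK
    assert 0 <= fprint < 2**FPRINT_BITS
    return (fprint << LETTERS_BITS) | letters

def unpack(sig: int) -> tuple[int, int]:
    """Split a signature back into (fingerprint, letters)."""
    return (sig & FPRINT_MASK) >> LETTERS_BITS, sig & LETTERS_MASK

sig = pack(0b1011, 0x00ABCDE)
assert unpack(sig) == (0b1011, 0x00ABCDE)
```

Keeping both fields in one int is what lets the index store one sorted list of signatures and binary-search it, rather than maintaining a two-level lookup structure.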