Ahem. Still running black?
author Scott <[email protected]>
Thu, 27 Jan 2022 05:35:20 +0000 (21:35 -0800)
committer Scott <[email protected]>
Thu, 27 Jan 2022 05:35:20 +0000 (21:35 -0800)
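
The diffs below are consistent with re-running black at its default 88-column
line length: statements previously wrapped at a narrower width are rejoined
onto single lines. An invocation along these lines would produce this kind of
reformatting (the exact command is an assumption, not recorded in the commit):

    # hypothetical invocation; --line-length 88 is black's default,
    # shown explicitly only for clarity
    black --line-length 88 *.py
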
32 files changed:
acl.py
ansi.py
argparse_utils.py
arper.py
base_presence.py
bootstrap.py
camera_utils.py
config.py
conversion_utils.py
datetime_utils.py
decorator_utils.py
deferred_operand.py
dict_utils.py
directory_filter.py
exec_utils.py
executors.py
file_utils.py
google_assistant.py
histogram.py
lockfile.py
logging_utils.py
logical_search.py
parallelize.py
persistent.py
profanity_filter.py
site_config.py
state_tracker.py
text_utils.py
thread_utils.py
unittest_utils.py
unscrambler.py
waitable_presence.py

diff --git a/acl.py b/acl.py
index adec643d56bfd085f4ef5c30485bfa09d857d77b..0692a045308a4a3a30b3ff6af67047a1572dc4dc 100644 (file)
--- a/acl.py
+++ b/acl.py
@@ -28,9 +28,7 @@ class Order(enum.Enum):
 class SimpleACL(ABC):
     """A simple Access Control List interface."""
 
-    def __init__(
-        self, *, order_to_check_allow_deny: Order, default_answer: bool
-    ):
+    def __init__(self, *, order_to_check_allow_deny: Order, default_answer: bool):
         if order_to_check_allow_deny not in (
             Order.ALLOW_DENY,
             Order.DENY_ALLOW,
diff --git a/ansi.py b/ansi.py
index 5fde4af56c9ef18254cd562ca19b10d853597e93..9e31b811ab978fa1ae81c3974ba991700287d867 100755 (executable)
--- a/ansi.py
+++ b/ansi.py
@@ -1773,9 +1773,7 @@ def fg(
         green = 0
     if blue is None:
         blue = 0
-    if (
-        is_16color(red) and is_16color(green) and is_16color(blue)
-    ) or force_16color:
+    if (is_16color(red) and is_16color(green) and is_16color(blue)) or force_16color:
         logger.debug("Using 16-color strategy")
         return fg_16color(red, green, blue)
     if (
@@ -1878,9 +1876,7 @@ def bg(
         green = 0
     if blue is None:
         blue = 0
-    if (
-        is_16color(red) and is_16color(green) and is_16color(blue)
-    ) or force_16color:
+    if (is_16color(red) and is_16color(green) and is_16color(blue)) or force_16color:
         logger.debug("Using 16-color strategy")
         return bg_16color(red, green, blue)
     if (
diff --git a/argparse_utils.py b/argparse_utils.py
index 8c254ae1f995f6aa684134c595e35968ea5b36cd..43536e460b7b4cc238c09d263bd36ae2b3b8f88e 100644 (file)
--- a/argparse_utils.py
+++ b/argparse_utils.py
@@ -16,9 +16,7 @@ logger = logging.getLogger(__name__)
 
 
 class ActionNoYes(argparse.Action):
-    def __init__(
-        self, option_strings, dest, default=None, required=False, help=None
-    ):
+    def __init__(self, option_strings, dest, default=None, required=False, help=None):
         if default is None:
             msg = 'You must provide a default with Yes/No action'
             logger.critical(msg)
@@ -47,9 +45,7 @@ class ActionNoYes(argparse.Action):
 
     @overrides
     def __call__(self, parser, namespace, values, option_strings=None):
-        if option_strings.startswith('--no-') or option_strings.startswith(
-            '--no_'
-        ):
+        if option_strings.startswith('--no-') or option_strings.startswith('--no_'):
             setattr(namespace, self.dest, False)
         else:
             setattr(namespace, self.dest, True)
diff --git a/arper.py b/arper.py
index 39aecf90bd438af92d69cffae6f4a3d44ea6aa0e..29a8a121870175c8a469c8158fe087b290f40fe0 100644 (file)
--- a/arper.py
+++ b/arper.py
@@ -131,10 +131,7 @@ class Arper(persistent.Persistent):
                     mac = mac.lower()
                     ip = ip.strip()
                     cached_state[mac] = ip
-            if (
-                len(cached_state)
-                > config.config['arper_min_entries_to_be_valid']
-            ):
+            if len(cached_state) > config.config['arper_min_entries_to_be_valid']:
                 return cls(cached_state)
             else:
                 msg = f'{cache_file} is invalid: only {len(cached_state)} entries.  Deleting it.'
@@ -147,12 +144,8 @@ class Arper(persistent.Persistent):
     @overrides
     def save(self) -> bool:
         if len(self.state) > config.config['arper_min_entries_to_be_valid']:
-            logger.debug(
-                f'Persisting state to {config.config["arper_cache_location"]}'
-            )
-            with file_utils.FileWriter(
-                config.config['arper_cache_location']
-            ) as wf:
+            logger.debug(f'Persisting state to {config.config["arper_cache_location"]}')
+            with file_utils.FileWriter(config.config['arper_cache_location']) as wf:
                 for (mac, ip) in self.state.items():
                     mac = mac.lower()
                     print(f'{mac}, {ip}', file=wf)
diff --git a/base_presence.py b/base_presence.py
index f774dbce270dc0b9c13c409ca9019d3a8e921166..612193e1df3b84f0ee8fc52b86ab5c94ce7ee38d 100755 (executable)
--- a/base_presence.py
+++ b/base_presence.py
@@ -85,9 +85,7 @@ class PresenceDetection(object):
             delta = now - self.last_update
             if (
                 delta.total_seconds()
-                > config.config[
-                    'presence_tolerable_staleness_seconds'
-                ].total_seconds()
+                > config.config['presence_tolerable_staleness_seconds'].total_seconds()
             ):
                 logger.debug(
                     f"It's been {delta.total_seconds()}s since last update; refreshing now."
@@ -146,9 +144,7 @@ class PresenceDetection(object):
             warnings.warn(msg, stacklevel=2)
             self.dark_locations.add(Location.HOUSE)
 
-    def read_persisted_macs_file(
-        self, filename: str, location: Location
-    ) -> None:
+    def read_persisted_macs_file(self, filename: str, location: Location) -> None:
         if location is Location.UNKNOWN:
             return
         with open(filename, "r") as rf:
@@ -177,9 +173,9 @@ class PresenceDetection(object):
                 logger.exception(e)
                 continue
             mac = mac.strip()
-            (self.location_ts_by_mac[location])[
-                mac
-            ] = datetime.datetime.fromtimestamp(int(ts.strip()))
+            (self.location_ts_by_mac[location])[mac] = datetime.datetime.fromtimestamp(
+                int(ts.strip())
+            )
             ip_name = ip_name.strip()
             match = re.match(r"(\d+\.\d+\.\d+\.\d+) +\(([^\)]+)\)", ip_name)
             if match is not None:
@@ -192,9 +188,7 @@ class PresenceDetection(object):
     def is_anyone_in_location_now(self, location: Location) -> bool:
         self.maybe_update()
         if location in self.dark_locations:
-            raise Exception(
-                f"Can't see {location} right now; answer undefined."
-            )
+            raise Exception(f"Can't see {location} right now; answer undefined.")
         for person in Person:
             if person is not None:
                 loc = self.where_is_person_now(person)
@@ -207,7 +201,9 @@ class PresenceDetection(object):
     def where_is_person_now(self, name: Person) -> Location:
         self.maybe_update()
         if len(self.dark_locations) > 0:
-            msg = f"Can't see {self.dark_locations} right now; answer confidence impacted"
+            msg = (
+                f"Can't see {self.dark_locations} right now; answer confidence impacted"
+            )
             logger.warning(msg)
             warnings.warn(msg, stacklevel=2)
         logger.debug(f'Looking for {name}...')
@@ -227,15 +223,11 @@ class PresenceDetection(object):
             if mac not in self.names_by_mac:
                 continue
             mac_name = self.names_by_mac[mac]
-            logger.debug(
-                f'Looking for {name}... check for mac {mac} ({mac_name})'
-            )
+            logger.debug(f'Looking for {name}... check for mac {mac} ({mac_name})')
             for location in self.location_ts_by_mac:
                 if mac in self.location_ts_by_mac[location]:
                     ts = (self.location_ts_by_mac[location])[mac]
-                    logger.debug(
-                        f'Seen {mac} ({mac_name}) at {location} since {ts}'
-                    )
+                    logger.debug(f'Seen {mac} ({mac_name}) at {location} since {ts}')
                     tiebreaks[location] = ts
 
             (
@@ -246,9 +238,7 @@ class PresenceDetection(object):
             v = votes.get(most_recent_location, 0)
             votes[most_recent_location] = v + bonus
             logger.debug(f'{name}: {location} gets {bonus} votes.')
-            credit = int(
-                credit * 0.2
-            )  # Note: list most important devices first
+            credit = int(credit * 0.2)  # Note: list most important devices first
             if credit <= 0:
                 credit = 1
         if len(votes) > 0:
diff --git a/bootstrap.py b/bootstrap.py
index c3b70db106260bbc80b592f45ea1fc70e6df6254..44a16fbd3b3f99a20e47913f0428314b1cf5bcc8 100644 (file)
--- a/bootstrap.py
+++ b/bootstrap.py
@@ -121,9 +121,7 @@ class ImportInterceptor(object):
                 loading_module = self.module_by_filename_cache[filename]
             else:
                 self.repopulate_modules_by_filename()
-                loading_module = self.module_by_filename_cache.get(
-                    filename, 'unknown'
-                )
+                loading_module = self.module_by_filename_cache.get(filename, 'unknown')
 
             path = self.tree_node_by_module.get(loading_module, [])
             path.extend([loaded_module])
@@ -264,9 +262,7 @@ def initialize(entry_point):
 
         with stopwatch.Timer() as t:
             ret = entry_point(*args, **kwargs)
-        logger.debug(
-            f'{entry_point.__name__} (program entry point) returned {ret}.'
-        )
+        logger.debug(f'{entry_point.__name__} (program entry point) returned {ret}.')
 
         if config.config['dump_all_objects']:
             dump_all_objects()
diff --git a/camera_utils.py b/camera_utils.py
index 03ac621a52eb4d855ec3ac5458f59712e7eea657..d2c50ddf314d96402f885c10c6c79081569ca2c1 100644 (file)
--- a/camera_utils.py
+++ b/camera_utils.py
@@ -74,9 +74,7 @@ def fetch_camera_image_from_video_server(
         response = requests.get(url, stream=False, timeout=10.0)
         if response.ok:
             raw = response.content
-            logger.debug(
-                f'Read {len(response.content)} byte image from HTTP server'
-            )
+            logger.debug(f'Read {len(response.content)} byte image from HTTP server')
             tmp = np.frombuffer(raw, dtype="uint8")
             logger.debug(
                 f'Translated raw content into {tmp.shape} {type(tmp)} with element type {type(tmp[0])}.'
@@ -172,9 +170,7 @@ def _fetch_camera_image(
         camera_name, width=width, quality=quality
     )
     if raw is None:
-        logger.debug(
-            "Reading from video server failed; trying direct RTSP stream"
-        )
+        logger.debug("Reading from video server failed; trying direct RTSP stream")
         raw = fetch_camera_image_from_rtsp_stream(camera_name, width=width)
     if raw is not None and len(raw) > 0:
         tmp = np.frombuffer(raw, dtype="uint8")
@@ -185,9 +181,7 @@ def _fetch_camera_image(
             jpg=jpg,
             hsv=hsv,
         )
-    msg = (
-        "Failed to retieve image from both video server and direct RTSP stream"
-    )
+    msg = "Failed to retieve image from both video server and direct RTSP stream"
     logger.warning(msg)
     warnings.warn(msg, stacklevel=2)
     return RawJpgHsv(None, None, None)
diff --git a/config.py b/config.py
index a608cf5101f86a046ab6759267772510e1fe020e..0edb16959bdd0c74bbb10c9556857fc498455485 100644 (file)
--- a/config.py
+++ b/config.py
@@ -239,9 +239,7 @@ def parse(entry_module: Optional[str]) -> Dict[str, Any]:
 
     if loadfile is not None:
         if saw_other_args:
-            msg = (
-                f'Augmenting commandline arguments with those from {loadfile}.'
-            )
+            msg = f'Augmenting commandline arguments with those from {loadfile}.'
             print(msg, file=sys.stderr)
             saved_messages.append(msg)
         if not os.path.exists(loadfile):
@@ -252,9 +250,7 @@ def parse(entry_module: Optional[str]) -> Dict[str, Any]:
             sys.exit(-1)
         with open(loadfile, 'r') as rf:
             newargs = rf.readlines()
-        newargs = [
-            arg.strip('\n') for arg in newargs if 'config_savefile' not in arg
-        ]
+        newargs = [arg.strip('\n') for arg in newargs if 'config_savefile' not in arg]
         sys.argv += newargs
 
     # Parse (possibly augmented, possibly completely overwritten)
diff --git a/conversion_utils.py b/conversion_utils.py
index 43268400021443344809b59dd8ced0fb91b9de18..684edc0a9116aa827db2c7c83a52de10dbbba73b 100644 (file)
--- a/conversion_utils.py
+++ b/conversion_utils.py
@@ -86,9 +86,7 @@ conversion_catalog = {
         lambda c: c * 1.8 + 32.0,
         "°F",
     ),
-    "Celsius": Converter(
-        "Celsius", "temperature", lambda c: c, lambda c: c, "°C"
-    ),
+    "Celsius": Converter("Celsius", "temperature", lambda c: c, lambda c: c, "°C"),
     "Kelvin": Converter(
         "Kelvin",
         "temperature",
@@ -109,9 +107,7 @@ def convert(magnitude: Number, from_thing: str, to_thing: str) -> float:
     return _convert(magnitude, src, dst)
 
 
-def _convert(
-    magnitude: Number, from_unit: Converter, to_unit: Converter
-) -> float:
+def _convert(magnitude: Number, from_unit: Converter, to_unit: Converter) -> float:
     canonical = from_unit.to_canonical(magnitude)
     converted = to_unit.from_canonical(canonical)
     return float(converted)
diff --git a/datetime_utils.py b/datetime_utils.py
index 97947203f6a126348d1b6602c320b07909f83970..3565936fce66c1197a04a8926f902452e6350ac4 100644 (file)
--- a/datetime_utils.py
+++ b/datetime_utils.py
@@ -34,9 +34,7 @@ def is_timezone_naive(dt: datetime.datetime) -> bool:
     return not is_timezone_aware(dt)
 
 
-def replace_timezone(
-    dt: datetime.datetime, tz: datetime.tzinfo
-) -> datetime.datetime:
+def replace_timezone(dt: datetime.datetime, tz: datetime.tzinfo) -> datetime.datetime:
     """
     Replaces the timezone on a datetime object directly (leaving
     the year, month, day, hour, minute, second, micro, etc... alone).
@@ -66,9 +64,7 @@ def replace_timezone(
     )
 
 
-def replace_time_timezone(
-    t: datetime.time, tz: datetime.tzinfo
-) -> datetime.time:
+def replace_time_timezone(t: datetime.time, tz: datetime.tzinfo) -> datetime.time:
     """
     Replaces the timezone on a datetime.time directly without performing
     any translation.
@@ -85,9 +81,7 @@ def replace_time_timezone(
     return t.replace(tzinfo=tz)
 
 
-def translate_timezone(
-    dt: datetime.datetime, tz: datetime.tzinfo
-) -> datetime.datetime:
+def translate_timezone(dt: datetime.datetime, tz: datetime.tzinfo) -> datetime.datetime:
     """
     Translates dt into a different timezone by adjusting the year, month,
     day, hour, minute, second, micro, etc... appropriately.  The returned
diff --git a/decorator_utils.py b/decorator_utils.py
index daae64e75348e973dc8a27cf387faf7f404ef2b2..1ecbce3e2b10003ecf315cbd9db0e04168860d49 100644 (file)
--- a/decorator_utils.py
+++ b/decorator_utils.py
@@ -80,9 +80,7 @@ def invocation_logged(func: Callable) -> Callable:
     return wrapper_invocation_logged
 
 
-def rate_limited(
-    n_calls: int, *, per_period_in_seconds: float = 1.0
-) -> Callable:
+def rate_limited(n_calls: int, *, per_period_in_seconds: float = 1.0) -> Callable:
     """Limit invocation of a wrapped function to n calls per period.
     Thread safe.  In testing this was relatively fair with multiple
     threads using it though that hasn't been measured.
@@ -220,9 +218,7 @@ def debug_count_calls(func: Callable) -> Callable:
     @functools.wraps(func)
     def wrapper_debug_count_calls(*args, **kwargs):
         wrapper_debug_count_calls.num_calls += 1
-        msg = (
-            f"Call #{wrapper_debug_count_calls.num_calls} of {func.__name__!r}"
-        )
+        msg = f"Call #{wrapper_debug_count_calls.num_calls} of {func.__name__!r}"
         print(msg)
         logger.info(msg)
         return func(*args, **kwargs)
@@ -266,15 +262,11 @@ def delay(
         @functools.wraps(func)
         def wrapper_delay(*args, **kwargs):
             if when & DelayWhen.BEFORE_CALL:
-                logger.debug(
-                    f"@delay for {seconds}s BEFORE_CALL to {func.__name__}"
-                )
+                logger.debug(f"@delay for {seconds}s BEFORE_CALL to {func.__name__}")
                 time.sleep(seconds)
             retval = func(*args, **kwargs)
             if when & DelayWhen.AFTER_CALL:
-                logger.debug(
-                    f"@delay for {seconds}s AFTER_CALL to {func.__name__}"
-                )
+                logger.debug(f"@delay for {seconds}s AFTER_CALL to {func.__name__}")
                 time.sleep(seconds)
             return retval
 
@@ -368,9 +360,7 @@ def memoized(func: Callable) -> Callable:
         cache_key = args + tuple(kwargs.items())
         if cache_key not in wrapper_memoized.cache:
             value = func(*args, **kwargs)
-            logger.debug(
-                f"Memoizing {cache_key} => {value} for {func.__name__}"
-            )
+            logger.debug(f"Memoizing {cache_key} => {value} for {func.__name__}")
             wrapper_memoized.cache[cache_key] = value
         else:
             logger.debug(f"Returning memoized value for {func.__name__}")
@@ -760,9 +750,7 @@ def call_with_sample_rate(sample_rate: float) -> Callable:
             if random.uniform(0, 1) < sample_rate:
                 return f(*args, **kwargs)
             else:
-                logger.debug(
-                    f"@call_with_sample_rate skipping a call to {f.__name__}"
-                )
+                logger.debug(f"@call_with_sample_rate skipping a call to {f.__name__}")
 
         return _call_with_sample_rate
 
diff --git a/deferred_operand.py b/deferred_operand.py
index 22bcb83297fa19aa24ab3d860d81a39c22c63dac..75e98d923d5bcccc135e4d5c05415c7949fac8a9 100644 (file)
--- a/deferred_operand.py
+++ b/deferred_operand.py
@@ -91,9 +91,7 @@ class DeferredOperand(ABC, Generic[T]):
         return DeferredOperand.resolve(self) is DeferredOperand.resolve(other)
 
     def is_not(self, other):
-        return DeferredOperand.resolve(self) is not DeferredOperand.resolve(
-            other
-        )
+        return DeferredOperand.resolve(self) is not DeferredOperand.resolve(other)
 
     def __abs__(self):
         return abs(DeferredOperand.resolve(self))
@@ -151,8 +149,6 @@ class DeferredOperand(ABC, Generic[T]):
 
     def __getattr__(self, method_name):
         def method(*args, **kwargs):
-            return getattr(DeferredOperand.resolve(self), method_name)(
-                *args, **kwargs
-            )
+            return getattr(DeferredOperand.resolve(self), method_name)(*args, **kwargs)
 
         return method
diff --git a/dict_utils.py b/dict_utils.py
index 79c86edf286f2c9ea9983906385365199be74892..b1464c6bb9967ce4efa48318a288babd0ff322e9 100644 (file)
--- a/dict_utils.py
+++ b/dict_utils.py
@@ -198,9 +198,7 @@ def min_key(d: Dict[Any, Any]) -> Any:
     return min(d.keys())
 
 
-def parallel_lists_to_dict(
-    keys: List[Any], values: List[Any]
-) -> Dict[Any, Any]:
+def parallel_lists_to_dict(keys: List[Any], values: List[Any]) -> Dict[Any, Any]:
     """Given two parallel lists (keys and values), create and return
     a dict.
 
@@ -211,9 +209,7 @@ def parallel_lists_to_dict(
 
     """
     if len(keys) != len(values):
-        raise Exception(
-            "Parallel keys and values lists must have the same length"
-        )
+        raise Exception("Parallel keys and values lists must have the same length")
     return dict(zip(keys, values))
 
 
diff --git a/directory_filter.py b/directory_filter.py
index 8d03ff603d425bddc24267061a902f67ac7bb3b1..508baf3bc888cf49e832ca72feddeda1797890a0 100644 (file)
--- a/directory_filter.py
+++ b/directory_filter.py
@@ -57,9 +57,7 @@ class DirectoryFileFilter(object):
             mtime = file_utils.get_file_raw_mtime(filename)
         if self.mtime_by_filename.get(filename, 0) != mtime:
             md5 = file_utils.get_file_md5(filename)
-            logger.debug(
-                f'Computed/stored {filename}\'s MD5 at ts={mtime} ({md5})'
-            )
+            logger.debug(f'Computed/stored {filename}\'s MD5 at ts={mtime} ({md5})')
             self.mtime_by_filename[filename] = mtime
             self.md5_by_filename[filename] = md5
 
diff --git a/exec_utils.py b/exec_utils.py
index 016310793152ddeb8872f5ec279c26ae1994655e..282a325a461e289144b5a58b5a88ce4a90098c83 100644 (file)
--- a/exec_utils.py
+++ b/exec_utils.py
@@ -68,9 +68,7 @@ def cmd_with_timeout(command: str, timeout_seconds: Optional[float]) -> int:
     subprocess.TimeoutExpired: Command '['/bin/bash', '-c', '/bin/sleep 2']' timed out after 0.1 seconds
 
     """
-    return subprocess.check_call(
-        ["/bin/bash", "-c", command], timeout=timeout_seconds
-    )
+    return subprocess.check_call(["/bin/bash", "-c", command], timeout=timeout_seconds)
 
 
 def cmd(command: str, timeout_seconds: Optional[float] = None) -> str:
@@ -120,9 +118,7 @@ def run_silently(command: str, timeout_seconds: Optional[float] = None) -> None:
     )
 
 
-def cmd_in_background(
-    command: str, *, silent: bool = False
-) -> subprocess.Popen:
+def cmd_in_background(command: str, *, silent: bool = False) -> subprocess.Popen:
     args = shlex.split(command)
     if silent:
         subproc = subprocess.Popen(
@@ -137,9 +133,7 @@ def cmd_in_background(
     def kill_subproc() -> None:
         try:
             if subproc.poll() is None:
-                logger.info(
-                    "At exit handler: killing {}: {}".format(subproc, command)
-                )
+                logger.info("At exit handler: killing {}: {}".format(subproc, command))
                 subproc.terminate()
                 subproc.wait(timeout=10.0)
         except BaseException as be:
diff --git a/executors.py b/executors.py
index 46812c2b49203c2b23c021978e8e6fe334b80afa..453139a5fbd514525a8152a81cb2bd0673e9acc0 100644 (file)
--- a/executors.py
+++ b/executors.py
@@ -160,9 +160,7 @@ class ProcessExecutor(BaseExecutor):
         self.adjust_task_count(+1)
         pickle = make_cloud_pickle(function, *args, **kwargs)
         result = self._process_executor.submit(self.run_cloud_pickle, pickle)
-        result.add_done_callback(
-            lambda _: self.histogram.add_item(time.time() - start)
-        )
+        result.add_done_callback(lambda _: self.histogram.add_item(time.time() - start))
         return result
 
     @overrides
@@ -258,9 +256,7 @@ class RemoteExecutorStatus:
         self.finished_bundle_timings_per_worker: Dict[
             RemoteWorkerRecord, List[float]
         ] = {}
-        self.in_flight_bundles_by_worker: Dict[
-            RemoteWorkerRecord, Set[str]
-        ] = {}
+        self.in_flight_bundles_by_worker: Dict[RemoteWorkerRecord, Set[str]] = {}
         self.bundle_details_by_uuid: Dict[str, BundleDetails] = {}
         self.finished_bundle_timings: List[float] = []
         self.last_periodic_dump: Optional[float] = None
@@ -270,9 +266,7 @@ class RemoteExecutorStatus:
         # as a memory fence for modifications to bundle.
         self.lock: threading.Lock = threading.Lock()
 
-    def record_acquire_worker(
-        self, worker: RemoteWorkerRecord, uuid: str
-    ) -> None:
+    def record_acquire_worker(self, worker: RemoteWorkerRecord, uuid: str) -> None:
         with self.lock:
             self.record_acquire_worker_already_locked(worker, uuid)
 
@@ -290,9 +284,7 @@ class RemoteExecutorStatus:
         with self.lock:
             self.record_bundle_details_already_locked(details)
 
-    def record_bundle_details_already_locked(
-        self, details: BundleDetails
-    ) -> None:
+    def record_bundle_details_already_locked(self, details: BundleDetails) -> None:
         assert self.lock.locked()
         self.bundle_details_by_uuid[details.uuid] = details
 
@@ -303,9 +295,7 @@ class RemoteExecutorStatus:
         was_cancelled: bool,
     ) -> None:
         with self.lock:
-            self.record_release_worker_already_locked(
-                worker, uuid, was_cancelled
-            )
+            self.record_release_worker_already_locked(worker, uuid, was_cancelled)
 
     def record_release_worker_already_locked(
         self,
@@ -377,11 +367,7 @@ class RemoteExecutorStatus:
                 ret += f'    ...{in_flight} bundles currently in flight:\n'
                 for bundle_uuid in self.in_flight_bundles_by_worker[worker]:
                     details = self.bundle_details_by_uuid.get(bundle_uuid, None)
-                    pid = (
-                        str(details.pid)
-                        if (details and details.pid != 0)
-                        else "TBD"
-                    )
+                    pid = str(details.pid) if (details and details.pid != 0) else "TBD"
                     if self.start_per_bundle[bundle_uuid] is not None:
                         sec = ts - self.start_per_bundle[bundle_uuid]
                         ret += f'       (pid={pid}): {details} for {sec:.1f}s so far '
@@ -412,10 +398,7 @@ class RemoteExecutorStatus:
         assert self.lock.locked()
         self.total_bundles_submitted = total_bundles_submitted
         ts = time.time()
-        if (
-            self.last_periodic_dump is None
-            or ts - self.last_periodic_dump > 5.0
-        ):
+        if self.last_periodic_dump is None or ts - self.last_periodic_dump > 5.0:
             print(self)
             self.last_periodic_dump = ts
 
@@ -429,9 +412,7 @@ class RemoteWorkerSelectionPolicy(ABC):
         pass
 
     @abstractmethod
-    def acquire_worker(
-        self, machine_to_avoid=None
-    ) -> Optional[RemoteWorkerRecord]:
+    def acquire_worker(self, machine_to_avoid=None) -> Optional[RemoteWorkerRecord]:
         pass
 
 
@@ -444,9 +425,7 @@ class WeightedRandomRemoteWorkerSelectionPolicy(RemoteWorkerSelectionPolicy):
         return False
 
     @overrides
-    def acquire_worker(
-        self, machine_to_avoid=None
-    ) -> Optional[RemoteWorkerRecord]:
+    def acquire_worker(self, machine_to_avoid=None) -> Optional[RemoteWorkerRecord]:
         grabbag = []
         for worker in self.workers:
             for x in range(0, worker.count):
@@ -585,9 +564,7 @@ class RemoteExecutor(BaseExecutor):
                             break
 
                     for uuid in bundle_uuids:
-                        bundle = self.status.bundle_details_by_uuid.get(
-                            uuid, None
-                        )
+                        bundle = self.status.bundle_details_by_uuid.get(uuid, None)
                         if (
                             bundle is not None
                             and bundle.src_bundle is None
@@ -678,9 +655,7 @@ class RemoteExecutor(BaseExecutor):
         logger.critical(msg)
         raise Exception(msg)
 
-    def release_worker(
-        self, bundle: BundleDetails, *, was_cancelled=True
-    ) -> None:
+    def release_worker(self, bundle: BundleDetails, *, was_cancelled=True) -> None:
         worker = bundle.worker
         assert worker is not None
         logger.debug(f'Released worker {worker}')
@@ -764,14 +739,14 @@ class RemoteExecutor(BaseExecutor):
         # Send input code / data to worker machine if it's not local.
         if hostname not in machine:
             try:
-                cmd = f'{SCP} {bundle.code_file} {username}@{machine}:{bundle.code_file}'
+                cmd = (
+                    f'{SCP} {bundle.code_file} {username}@{machine}:{bundle.code_file}'
+                )
                 start_ts = time.time()
                 logger.info(f"{bundle}: Copying work to {worker} via {cmd}.")
                 run_silently(cmd)
                 xfer_latency = time.time() - start_ts
-                logger.debug(
-                    f"{bundle}: Copying to {worker} took {xfer_latency:.1f}s."
-                )
+                logger.debug(f"{bundle}: Copying to {worker} took {xfer_latency:.1f}s.")
             except Exception as e:
                 self.release_worker(bundle)
                 if is_original:
@@ -804,9 +779,7 @@ class RemoteExecutor(BaseExecutor):
             f' /home/scott/lib/python_modules/remote_worker.py'
             f' --code_file {bundle.code_file} --result_file {bundle.result_file}"'
         )
-        logger.debug(
-            f'{bundle}: Executing {cmd} in the background to kick off work...'
-        )
+        logger.debug(f'{bundle}: Executing {cmd} in the background to kick off work...')
         p = cmd_in_background(cmd, silent=True)
         bundle.pid = p.pid
         logger.debug(
@@ -935,9 +908,7 @@ class RemoteExecutor(BaseExecutor):
                 # Re-raise the exception; the code in wait_for_process may
                 # decide to emergency_retry_nasty_bundle here.
                 raise Exception(e)
-            logger.debug(
-                f'Removing local (master) {code_file} and {result_file}.'
-            )
+            logger.debug(f'Removing local (master) {code_file} and {result_file}.')
             os.remove(f'{result_file}')
             os.remove(f'{code_file}')
 
diff --git a/file_utils.py b/file_utils.py
index 12aadca1b1e47c7bc0e89f8f88d6fe30e0e9bb81..5d9a0be3b272bbc51eebf7894baac7a25fe11179 100644 (file)
--- a/file_utils.py
+++ b/file_utils.py
@@ -366,9 +366,7 @@ def get_file_mtime_timedelta(filename: str) -> Optional[datetime.timedelta]:
     return get_file_timestamp_timedelta(filename, lambda x: x.st_mtime)
 
 
-def describe_file_timestamp(
-    filename: str, extractor, *, brief=False
-) -> Optional[str]:
+def describe_file_timestamp(filename: str, extractor, *, brief=False) -> Optional[str]:
     from datetime_utils import describe_duration, describe_duration_briefly
 
     age = get_file_timestamp_age_seconds(filename, extractor)
diff --git a/google_assistant.py b/google_assistant.py
index 75ca6432cf76b9f84506aa549855e5cec25e1844..b92f443d6744882ddaa56418ed992b39b33c8568 100644 (file)
--- a/google_assistant.py
+++ b/google_assistant.py
@@ -105,9 +105,7 @@ def ask_google(cmd: str, *, recognize_speech=True) -> GoogleResponse:
             audio_transcription=audio_transcription,
         )
     else:
-        message = (
-            f'HTTP request to {url} with {payload} failed; code {r.status_code}'
-        )
+        message = f'HTTP request to {url} with {payload} failed; code {r.status_code}'
         logger.error(message)
         return GoogleResponse(
             success=False,
diff --git a/histogram.py b/histogram.py
index 4aa47490122481852ae03e264795509a8794e54e..a899fe9c60cde770eda49dcdd1dade1790e9ec08 100644 (file)
--- a/histogram.py
+++ b/histogram.py
@@ -94,11 +94,7 @@ class SimpleHistogram(Generic[T]):
                 right_end="",
             )
             label = f'{label_formatter}..{label_formatter}' % (start, end)
-            txt += (
-                f'{label:20}: '
-                + bar
-                + f"({pop/self.count*100.0:5.2f}% n={pop})\n"
-            )
+            txt += f'{label:20}: ' + bar + f"({pop/self.count*100.0:5.2f}% n={pop})\n"
             if start == last_bucket_start:
                 break
         return txt
diff --git a/lockfile.py b/lockfile.py
index 2bbe6f4e2124419bd788312f17a0e6645d921232..4b6aadeffde8ec7bf255025873d720e2b3afda93 100644 (file)
--- a/lockfile.py
+++ b/lockfile.py
@@ -15,9 +15,7 @@ import datetime_utils
 import decorator_utils
 
 
-cfg = config.add_commandline_args(
-    f'Lockfile ({__file__})', 'Args related to lockfiles'
-)
+cfg = config.add_commandline_args(f'Lockfile ({__file__})', 'Args related to lockfiles')
 cfg.add_argument(
     '--lockfile_held_duration_warning_threshold_sec',
     type=float,
@@ -136,9 +134,7 @@ class LockFile(object):
                 duration
                 >= config.config['lockfile_held_duration_warning_threshold_sec']
             ):
-                str_duration = datetime_utils.describe_duration_briefly(
-                    duration
-                )
+                str_duration = datetime_utils.describe_duration_briefly(duration)
                 msg = f'Held {self.lockfile} for {str_duration}'
                 logger.warning(msg)
                 warnings.warn(msg, stacklevel=2)
diff --git a/logging_utils.py b/logging_utils.py
index bf8d8b062b911507ccbd7f68f5346530c7bd0d79..2d9d63b78f8543890bb25fa879667bcc59294af1 100644 (file)
--- a/logging_utils.py
+++ b/logging_utils.py
@@ -22,9 +22,7 @@ import pytz
 import argparse_utils
 import config
 
-cfg = config.add_commandline_args(
-    f'Logging ({__file__})', 'Args related to logging'
-)
+cfg = config.add_commandline_args(f'Logging ({__file__})', 'Args related to logging')
 cfg.add_argument(
     '--logging_config_file',
     type=argparse_utils.valid_filename,
@@ -233,9 +231,7 @@ class SquelchRepeatedMessagesFilter(logging.Filter):
         if id1 not in squelched_logging_counts:
             return True
         threshold = squelched_logging_counts[id1]
-        logsite = (
-            f'{record.pathname}+{record.lineno}+{record.levelno}+{record.msg}'
-        )
+        logsite = f'{record.pathname}+{record.lineno}+{record.levelno}+{record.msg}'
         count = self.counters[logsite]
         self.counters[logsite] += 1
         return count < threshold
@@ -444,12 +440,8 @@ def initialize_logging(logger=None) -> logging.Logger:
     if config.config['logging_syslog']:
         if sys.platform not in ('win32', 'cygwin'):
             if config.config['logging_syslog_facility']:
-                facility_name = (
-                    'LOG_' + config.config['logging_syslog_facility']
-                )
-            facility = SysLogHandler.__dict__.get(
-                facility_name, SysLogHandler.LOG_USER
-            )
+                facility_name = 'LOG_' + config.config['logging_syslog_facility']
+            facility = SysLogHandler.__dict__.get(facility_name, SysLogHandler.LOG_USER)
             handler = SysLogHandler(facility=facility, address='/dev/log')
             handler.setFormatter(
                 MillisecondAwareFormatter(
@@ -533,9 +525,7 @@ def initialize_logging(logger=None) -> logging.Logger:
     level_name = logging._levelToName.get(
         default_logging_level, str(default_logging_level)
     )
-    logger.debug(
-        f'Initialized global logging; default logging level is {level_name}.'
-    )
+    logger.debug(f'Initialized global logging; default logging level is {level_name}.')
     if (
         config.config['logging_clear_preexisting_handlers']
         and preexisting_handlers_count > 0
@@ -664,23 +654,17 @@ class OutputMultiplexer(object):
         self.logger = logger
 
         if filenames is not None:
-            self.f = [
-                open(filename, 'wb', buffering=0) for filename in filenames
-            ]
+            self.f = [open(filename, 'wb', buffering=0) for filename in filenames]
         else:
             if destination_bitv & OutputMultiplexer.FILENAMES:
-                raise ValueError(
-                    "Filenames argument is required if bitv & FILENAMES"
-                )
+                raise ValueError("Filenames argument is required if bitv & FILENAMES")
             self.f = None
 
         if handles is not None:
             self.h = [handle for handle in handles]
         else:
             if destination_bitv & OutputMultiplexer.Destination.FILEHANDLES:
-                raise ValueError(
-                    "Handle argument is required if bitv & FILEHANDLES"
-                )
+                raise ValueError("Handle argument is required if bitv & FILEHANDLES")
             self.h = None
 
         self.set_destination_bitv(destination_bitv)
@@ -690,13 +674,9 @@ class OutputMultiplexer(object):
 
     def set_destination_bitv(self, destination_bitv: int):
         if destination_bitv & self.Destination.FILENAMES and self.f is None:
-            raise ValueError(
-                "Filename argument is required if bitv & FILENAMES"
-            )
+            raise ValueError("Filename argument is required if bitv & FILENAMES")
         if destination_bitv & self.Destination.FILEHANDLES and self.h is None:
-            raise ValueError(
-                "Handle argument is required if bitv & FILEHANDLES"
-            )
+            raise ValueError("Handle argument is required if bitv & FILEHANDLES")
         self.destination_bitv = destination_bitv
 
     def print(self, *args, **kwargs):
@@ -719,18 +699,12 @@ class OutputMultiplexer(object):
             end = "\n"
         if end == '\n':
             buf += '\n'
-        if (
-            self.destination_bitv & self.Destination.FILENAMES
-            and self.f is not None
-        ):
+        if self.destination_bitv & self.Destination.FILENAMES and self.f is not None:
             for _ in self.f:
                 _.write(buf.encode('utf-8'))
                 _.flush()
 
-        if (
-            self.destination_bitv & self.Destination.FILEHANDLES
-            and self.h is not None
-        ):
+        if self.destination_bitv & self.Destination.FILEHANDLES and self.h is not None:
             for _ in self.h:
                 _.write(buf)
                 _.flush()
diff --git a/logical_search.py b/logical_search.py
index 85f946135e406e74137a632bceff2a5a1845c699..c324ff850895bb94ee29d84448c4449ad5fe1727 100644 (file)
--- a/logical_search.py
+++ b/logical_search.py
@@ -30,9 +30,7 @@ class Document(NamedTuple):
 
     docid: str  # a unique idenfier for the document
     tags: Set[str]  # an optional set of tags
-    properties: List[
-        Tuple[str, str]
-    ]  # an optional set of key->value properties
+    properties: List[Tuple[str, str]]  # an optional set of key->value properties
     reference: Any  # an optional reference to something else
 
 
@@ -102,9 +100,7 @@ class Corpus(object):
 
     def __init__(self) -> None:
         self.docids_by_tag: Dict[str, Set[str]] = defaultdict(set)
-        self.docids_by_property: Dict[Tuple[str, str], Set[str]] = defaultdict(
-            set
-        )
+        self.docids_by_property: Dict[Tuple[str, str], Set[str]] = defaultdict(set)
         self.docids_with_property: Dict[str, Set[str]] = defaultdict(set)
         self.documents_by_docid: Dict[str, Document] = {}
 
@@ -183,11 +179,7 @@ class Corpus(object):
         """Invert a set of docids."""
 
         return set(
-            [
-                docid
-                for docid in self.documents_by_docid.keys()
-                if docid not in original
-            ]
+            [docid for docid in self.documents_by_docid.keys() if docid not in original]
         )
 
     def get_doc(self, docid: str) -> Optional[Document]:
@@ -297,9 +289,7 @@ class Corpus(object):
                         ok = True
                         break
                 if not ok:
-                    raise ParseError(
-                        "Unbalanced parenthesis in query expression"
-                    )
+                    raise ParseError("Unbalanced parenthesis in query expression")
 
             # and, or, not
             else:
@@ -376,23 +366,17 @@ class Node(object):
                     raise ParseError(f"Unexpected query {tag}")
         elif self.op is Operation.DISJUNCTION:
             if len(evaled_operands) != 2:
-                raise ParseError(
-                    "Operation.DISJUNCTION (or) expects two operands."
-                )
+                raise ParseError("Operation.DISJUNCTION (or) expects two operands.")
             retval.update(evaled_operands[0])
             retval.update(evaled_operands[1])
         elif self.op is Operation.CONJUNCTION:
             if len(evaled_operands) != 2:
-                raise ParseError(
-                    "Operation.CONJUNCTION (and) expects two operands."
-                )
+                raise ParseError("Operation.CONJUNCTION (and) expects two operands.")
             retval.update(evaled_operands[0])
             retval = retval.intersection(evaled_operands[1])
         elif self.op is Operation.INVERSION:
             if len(evaled_operands) != 1:
-                raise ParseError(
-                    "Operation.INVERSION (not) expects one operand."
-                )
+                raise ParseError("Operation.INVERSION (not) expects one operand.")
             _ = evaled_operands[0]
             if isinstance(_, set):
                 retval.update(self.corpus.invert_docid_set(_))
diff --git a/parallelize.py b/parallelize.py
index 98f883c5716ae8fa3d67101b1d38d20dece7a0b6..698a7eca130d2ffc64a9619c89bdc2be5eb34ba9 100644 (file)
--- a/parallelize.py
+++ b/parallelize.py
@@ -15,9 +15,7 @@ class Method(Enum):
 
 
 def parallelize(
-    _funct: typing.Optional[typing.Callable] = None,
-    *,
-    method: Method = Method.THREAD
+    _funct: typing.Optional[typing.Callable] = None, *, method: Method = Method.THREAD
 ) -> typing.Callable:
     """Usage:
 
diff --git a/persistent.py b/persistent.py
index 5c2b132448ebce64271c0165a8848337db5e69c8..d62dd6754eeffc78c1c09adea7c82e778f8450b3 100644 (file)
--- a/persistent.py
+++ b/persistent.py
@@ -65,11 +65,7 @@ def was_file_written_today(filename: str) -> bool:
 
     mtime = file_utils.get_file_mtime_as_datetime(filename)
     now = datetime.datetime.now()
-    return (
-        mtime.month == now.month
-        and mtime.day == now.day
-        and mtime.year == now.year
-    )
+    return mtime.month == now.month and mtime.day == now.day and mtime.year == now.year
 
 
 def was_file_written_within_n_seconds(
@@ -144,16 +140,12 @@ class persistent_autoloaded_singleton(object):
 
             # Otherwise, try to load it from persisted state.
             was_loaded = False
-            logger.debug(
-                f'Attempting to load {cls.__name__} from persisted state.'
-            )
+            logger.debug(f'Attempting to load {cls.__name__} from persisted state.')
             self.instance = cls.load()
             if not self.instance:
                 msg = 'Loading from cache failed.'
                 logger.warning(msg)
-                logger.debug(
-                    f'Attempting to instantiate {cls.__name__} directly.'
-                )
+                logger.debug(f'Attempting to instantiate {cls.__name__} directly.')
                 self.instance = cls(*args, **kwargs)
             else:
                 logger.debug(
diff --git a/profanity_filter.py b/profanity_filter.py
index 95540fa7b36f0bd8fcf813196e2f9f2390569fce..4723a2db0679e5f866f14bbb723c66391fa06ae6 100755 (executable)
--- a/profanity_filter.py
+++ b/profanity_filter.py
@@ -494,9 +494,7 @@ class ProfanityFilter(object):
         result = result.replace('3', 'e')
         for x in string.punctuation:
             result = result.replace(x, "")
-        chunks = [
-            self.stemmer.stem(word) for word in nltk.word_tokenize(result)
-        ]
+        chunks = [self.stemmer.stem(word) for word in nltk.word_tokenize(result)]
         return ' '.join(chunks)
 
     def tokenize(self, text: str):
diff --git a/site_config.py b/site_config.py
index 62c2b98d347c67954980e794a93e5edebcf705a2..b09e735c79c8f92665438b2c064f9ccf652d33bd 100644 (file)
--- a/site_config.py
+++ b/site_config.py
@@ -97,9 +97,7 @@ def get_config():
             network_netmask='255.255.255.0',
             network_router_ip='10.0.0.1',
             presence_location=Location.HOUSE,
-            is_anyone_present=lambda x=Location.HOUSE: is_anyone_present_wrapper(
-                x
-            ),
+            is_anyone_present=lambda x=Location.HOUSE: is_anyone_present_wrapper(x),
             arper_minimum_device_count=50,
         )
     elif location == 'CABIN':
@@ -110,9 +108,7 @@ def get_config():
             network_netmask='255.255.255.0',
             network_router_ip='192.168.0.1',
             presence_location=Location.CABIN,
-            is_anyone_present=lambda x=Location.CABIN: is_anyone_present_wrapper(
-                x
-            ),
+            is_anyone_present=lambda x=Location.CABIN: is_anyone_present_wrapper(x),
             arper_minimum_device_count=15,
         )
     else:
diff --git a/state_tracker.py b/state_tracker.py
index 4836e3eb99915a6d25a06102ee0c93163cd69133..453faf7b1972d8f4f1b3250bfdd353150e682503 100644 (file)
--- a/state_tracker.py
+++ b/state_tracker.py
@@ -73,21 +73,15 @@ class StateTracker(ABC):
         for update_id in sorted(self.last_reminder_ts.keys()):
             if force_all_updates_to_run:
                 logger.debug('Forcing all updates to run')
-                self.update(
-                    update_id, self.now, self.last_reminder_ts[update_id]
-                )
+                self.update(update_id, self.now, self.last_reminder_ts[update_id])
                 self.last_reminder_ts[update_id] = self.now
                 return
 
             refresh_secs = self.update_ids_to_update_secs[update_id]
             last_run = self.last_reminder_ts[update_id]
             if last_run is None:  # Never run before
-                logger.debug(
-                    f'id {update_id} has never been run; running it now'
-                )
-                self.update(
-                    update_id, self.now, self.last_reminder_ts[update_id]
-                )
+                logger.debug(f'id {update_id} has never been run; running it now')
+                self.update(update_id, self.now, self.last_reminder_ts[update_id])
                 self.last_reminder_ts[update_id] = self.now
             else:
                 delta = self.now - last_run
@@ -148,9 +142,7 @@ class AutomaticStateTracker(StateTracker):
         This may block for as long as self.sleep_delay.
 
         """
-        logger.debug(
-            'Setting shutdown event and waiting for background thread.'
-        )
+        logger.debug('Setting shutdown event and waiting for background thread.')
         self.should_terminate.set()
         self.updater_thread.join()
         logger.debug('Background thread terminated.')
diff --git a/text_utils.py b/text_utils.py
index 94df3e3499756a86955ebc3ee7330e5329720255..cfed1699dc3f29090a7846bff8dc2d3c89fa9174 100644 (file)
--- a/text_utils.py
+++ b/text_utils.py
@@ -136,9 +136,7 @@ def distribute_strings(
     return retval
 
 
-def justify_string_by_chunk(
-    string: str, width: int = 80, padding: str = " "
-) -> str:
+def justify_string_by_chunk(string: str, width: int = 80, padding: str = " ") -> str:
     """
     Justifies a string.
 
diff --git a/thread_utils.py b/thread_utils.py
index ad1f0bf9029b3232ba9cbd28b085afade91f0186..d8c85f46fbcaed33864d591458c64a1cebeb162d 100644 (file)
--- a/thread_utils.py
+++ b/thread_utils.py
@@ -61,9 +61,7 @@ def background_thread(
 
     def wrapper(funct: Callable):
         @functools.wraps(funct)
-        def inner_wrapper(
-            *a, **kwa
-        ) -> Tuple[threading.Thread, threading.Event]:
+        def inner_wrapper(*a, **kwa) -> Tuple[threading.Thread, threading.Event]:
             should_terminate = threading.Event()
             should_terminate.clear()
             newargs = (*a, should_terminate)
@@ -130,9 +128,7 @@ def periodically_invoke(
             should_terminate = threading.Event()
             should_terminate.clear()
             newargs = (should_terminate, *args)
-            thread = threading.Thread(
-                target=helper_thread, args=newargs, kwargs=kwargs
-            )
+            thread = threading.Thread(target=helper_thread, args=newargs, kwargs=kwargs)
             thread.start()
             logger.debug(f'Started thread {thread.name} tid={thread.ident}')
             return (thread, should_terminate)
diff --git a/unittest_utils.py b/unittest_utils.py
index 4a9669d3a21f66e35004e1968cc85b65d711fd5c..f4fed35f09fdf29970820bef8566652825327634 100644 (file)
--- a/unittest_utils.py
+++ b/unittest_utils.py
@@ -87,9 +87,7 @@ class PerfRegressionDataPersister(ABC):
         pass
 
     @abstractmethod
-    def save_performance_data(
-        self, method_id: str, data: Dict[str, List[float]]
-    ):
+    def save_performance_data(self, method_id: str, data: Dict[str, List[float]]):
         pass
 
     @abstractmethod
@@ -106,9 +104,7 @@ class FileBasedPerfRegressionDataPersister(PerfRegressionDataPersister):
         with open(self.filename, 'rb') as f:
             return pickle.load(f)
 
-    def save_performance_data(
-        self, method_id: str, data: Dict[str, List[float]]
-    ):
+    def save_performance_data(self, method_id: str, data: Dict[str, List[float]]):
         for trace in self.traces_to_delete:
             if trace in data:
                 data[trace] = []
@@ -138,9 +134,7 @@ class DatabasePerfRegressionDataPersister(PerfRegressionDataPersister):
         results.close()
         return ret
 
-    def save_performance_data(
-        self, method_id: str, data: Dict[str, List[float]]
-    ):
+    def save_performance_data(self, method_id: str, data: Dict[str, List[float]]):
         self.delete_performance_data(method_id)
         for (method_id, perf_data) in data.items():
             sql = 'INSERT INTO runtimes_by_function (function, runtime) VALUES '
@@ -174,9 +168,7 @@ def check_method_for_perf_regressions(func: Callable) -> Callable:
             )
             helper = DatabasePerfRegressionDataPersister(dbspec)
         else:
-            raise Exception(
-                'Unknown/unexpected --unittests_persistance_strategy value'
-            )
+            raise Exception('Unknown/unexpected --unittests_persistance_strategy value')
 
         func_id = function_utils.function_identifier(func)
         func_name = func.__name__
@@ -212,16 +204,10 @@ def check_method_for_perf_regressions(func: Callable) -> Callable:
             stdev = statistics.stdev(hist)
             logger.debug(f'For {func_name}, performance stdev={stdev}')
             slowest = hist[-1]
-            logger.debug(
-                f'For {func_name}, slowest perf on record is {slowest:f}s'
-            )
+            logger.debug(f'For {func_name}, slowest perf on record is {slowest:f}s')
             limit = slowest + stdev * 4
-            logger.debug(
-                f'For {func_name}, max acceptable runtime is {limit:f}s'
-            )
-            logger.debug(
-                f'For {func_name}, actual observed runtime was {run_time:f}s'
-            )
+            logger.debug(f'For {func_name}, max acceptable runtime is {limit:f}s')
+            logger.debug(f'For {func_name}, actual observed runtime was {run_time:f}s')
             if run_time > limit and not config.config['unittests_ignore_perf']:
                 msg = f'''{func_id} performance has regressed unacceptably.
 {slowest:f}s is the slowest runtime on record in {len(hist)} perf samples.
diff --git a/unscrambler.py b/unscrambler.py
index 3abb6d817a633147382d0ac82fea37cf4450e816..d3686d67ed763d9b240b58ee9d43b5151ed31ea8 100644 (file)
--- a/unscrambler.py
+++ b/unscrambler.py
@@ -121,13 +121,9 @@ class Unscrambler(object):
 
     # 52 bits
     @staticmethod
-    def _compute_word_fingerprint(
-        word: str, population: Mapping[str, int]
-    ) -> int:
+    def _compute_word_fingerprint(word: str, population: Mapping[str, int]) -> int:
         fp = 0
-        for pair in sorted(
-            population.items(), key=lambda x: x[1], reverse=True
-        ):
+        for pair in sorted(population.items(), key=lambda x: x[1], reverse=True):
             letter = pair[0]
             if letter in fprint_feature_bit:
                 count = pair[1]
@@ -146,9 +142,7 @@ class Unscrambler(object):
         population: Mapping[str, int],
     ) -> int:
         sig = 0
-        for pair in sorted(
-            population.items(), key=lambda x: x[1], reverse=True
-        ):
+        for pair in sorted(population.items(), key=lambda x: x[1], reverse=True):
             letter = pair[0]
             if letter not in letter_sigs:
                 continue
@@ -189,9 +183,7 @@ class Unscrambler(object):
         """
         population = list_utils.population_counts(word)
         fprint = Unscrambler._compute_word_fingerprint(word, population)
-        letter_sig = Unscrambler._compute_word_letter_sig(
-            letter_sigs, word, population
-        )
+        letter_sig = Unscrambler._compute_word_letter_sig(letter_sigs, word, population)
         assert fprint & letter_sig == 0
         sig = fprint | letter_sig
         return sig
@@ -238,9 +230,7 @@ class Unscrambler(object):
 
         """
         sig = Unscrambler.compute_word_sig(word)
-        return self.lookup_by_sig(
-            sig, include_fuzzy_matches=include_fuzzy_matches
-        )
+        return self.lookup_by_sig(sig, include_fuzzy_matches=include_fuzzy_matches)
 
     def lookup_by_sig(
         self, sig: int, *, include_fuzzy_matches: bool = False
diff --git a/waitable_presence.py b/waitable_presence.py
index cd5501da59fc3e3ddd1022f8fd87cfba756aeffc..d54511ff362bc45ceaf5c95e15174523f0327be9 100644 (file)
--- a/waitable_presence.py
+++ b/waitable_presence.py
@@ -20,9 +20,7 @@ import state_tracker
 logger = logging.getLogger(__name__)
 
 
-class WaitablePresenceDetectorWithMemory(
-    state_tracker.WaitableAutomaticStateTracker
-):
+class WaitablePresenceDetectorWithMemory(state_tracker.WaitableAutomaticStateTracker):
     """
     This is a waitable class that keeps a PresenceDetector internally
     and periodically polls it to detect changes in presence in a
@@ -88,9 +86,7 @@ class WaitablePresenceDetectorWithMemory(
 
     def check_detector(self) -> None:
         if len(self.detector.dark_locations) > 0:
-            logger.debug(
-                'PresenceDetector is incomplete; trying to reinitialize...'
-            )
+            logger.debug('PresenceDetector is incomplete; trying to reinitialize...')
             self.detector = base_presence.PresenceDetection()
 
     def is_someone_home(self) -> Tuple[bool, datetime.datetime]: