Change settings in flake8 and black.
author Scott Gasch <[email protected]>
Thu, 3 Feb 2022 22:18:37 +0000 (14:18 -0800)
committer Scott Gasch <[email protected]>
Thu, 3 Feb 2022 22:18:37 +0000 (14:18 -0800)
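
The flake8 and black configuration files themselves are not part of this diff; judging from the reformatted hunks below, which now join statements onto single lines of up to roughly 100 columns, the settings change presumably raises the maximum line length to 100. A minimal sketch of what that would look like (file names and values are assumptions inferred from the diff, not contents of this commit):

    # pyproject.toml (assumed)
    [tool.black]
    line-length = 100

    # .flake8 (assumed)
    [flake8]
    max-line-length = 100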
45 files changed:
acl.py
ansi.py
arper.py
base_presence.py
bootstrap.py
cached/weather_forecast.py
camera_utils.py
collect/bidict.py
collect/bst.py
collect/shared_dict.py
collect/trie.py
config.py
conversion_utils.py
dateparse/dateparse_utils.py
datetime_utils.py
decorator_utils.py
executors.py
file_utils.py
lockfile.py
logging_utils.py
logical_search.py
ml/model_trainer.py
persistent.py
smart_home/lights.py
smart_home/outlets.py
smart_home/thermometers.py
state_tracker.py
string_utils.py
tests/centcount_test.py
tests/dateparse_utils_test.py
tests/decorator_utils_test.py
tests/letter_compress_test.py
tests/logging_utils_test.py
tests/money_test.py
tests/profanity_filter_test.py
tests/rate_test.py
tests/string_utils_test.py
text_utils.py
type/centcount.py
type/locations.py
type/money.py
type/people.py
unittest_utils.py
unscrambler.py
waitable_presence.py

diff --git a/acl.py b/acl.py
index a936339a0dcb715db02327125c9f5b54e9b100d3..a1ff4051d80087b3c55ee085a1c1dd47c7611d73 100644 (file)
--- a/acl.py
+++ b/acl.py
@@ -34,8 +34,7 @@ class SimpleACL(ABC):
             Order.DENY_ALLOW,
         ):
             raise Exception(
-                'order_to_check_allow_deny must be Order.ALLOW_DENY or '
-                + 'Order.DENY_ALLOW'
+                'order_to_check_allow_deny must be Order.ALLOW_DENY or ' + 'Order.DENY_ALLOW'
             )
         self.order_to_check_allow_deny = order_to_check_allow_deny
         self.default_answer = default_answer
@@ -193,16 +192,12 @@ class StringWildcardBasedACL(PredicateListBasedACL):
         allow_predicates = []
         if allowed_patterns is not None:
             for pattern in allowed_patterns:
-                allow_predicates.append(
-                    lambda x, pattern=pattern: fnmatch.fnmatch(x, pattern)
-                )
+                allow_predicates.append(lambda x, pattern=pattern: fnmatch.fnmatch(x, pattern))
         deny_predicates = None
         if denied_patterns is not None:
             deny_predicates = []
             for pattern in denied_patterns:
-                deny_predicates.append(
-                    lambda x, pattern=pattern: fnmatch.fnmatch(x, pattern)
-                )
+                deny_predicates.append(lambda x, pattern=pattern: fnmatch.fnmatch(x, pattern))
 
         super().__init__(
             allow_predicate_list=allow_predicates,
@@ -227,16 +222,12 @@ class StringREBasedACL(PredicateListBasedACL):
         if allowed_regexs is not None:
             allow_predicates = []
             for pattern in allowed_regexs:
-                allow_predicates.append(
-                    lambda x, pattern=pattern: pattern.match(x) is not None
-                )
+                allow_predicates.append(lambda x, pattern=pattern: pattern.match(x) is not None)
         deny_predicates = None
         if denied_regexs is not None:
             deny_predicates = []
             for pattern in denied_regexs:
-                deny_predicates.append(
-                    lambda x, pattern=pattern: pattern.match(x) is not None
-                )
+                deny_predicates.append(lambda x, pattern=pattern: pattern.match(x) is not None)
         super().__init__(
             allow_predicate_list=allow_predicates,
             deny_predicate_list=deny_predicates,
diff --git a/ansi.py b/ansi.py
index 02741e1f7fde542568883787084eaee726d0a79a..1633fddbcb31714d3ae7342daca37c1c4034b4c6 100755 (executable)
--- a/ansi.py
+++ b/ansi.py
@@ -1776,9 +1776,7 @@ def fg(
     if (is_16color(red) and is_16color(green) and is_16color(blue)) or force_16color:
         logger.debug("Using 16-color strategy")
         return fg_16color(red, green, blue)
-    if (
-        is_216color(red) and is_216color(green) and is_216color(blue)
-    ) or force_216color:
+    if (is_216color(red) and is_216color(green) and is_216color(blue)) or force_216color:
         logger.debug("Using 216-color strategy")
         return fg_216color(red, green, blue)
     logger.debug("Using 24-bit color strategy")
@@ -1880,9 +1878,7 @@ def bg(
     if (is_16color(red) and is_16color(green) and is_16color(blue)) or force_16color:
         logger.debug("Using 16-color strategy")
         return bg_16color(red, green, blue)
-    if (
-        is_216color(red) and is_216color(green) and is_216color(blue)
-    ) or force_216color:
+    if (is_216color(red) and is_216color(green) and is_216color(blue)) or force_216color:
         logger.debug("Using 216-color strategy")
         return bg_216color(red, green, blue)
     logger.debug("Using 24-bit color strategy")
@@ -1939,9 +1935,6 @@ if __name__ == '__main__':
                 _ = pick_contrasting_color(possibility)
                 xf = fg(None, _[0], _[1], _[2])
                 xb = bg(None, _[0], _[1], _[2])
-                print(
-                    f'{f}{xb}{possibility}{reset()}\t\t\t'
-                    f'{b}{xf}{possibility}{reset()}'
-                )
+                print(f'{f}{xb}{possibility}{reset()}\t\t\t' f'{b}{xf}{possibility}{reset()}')
 
     main()
diff --git a/arper.py b/arper.py
index ff1168ea4ef30b6f0ca16f5411e188b113548a0d..bdd97ac2c93b8109fcfb187f5bc7b455fad7a009 100644 (file)
--- a/arper.py
+++ b/arper.py
@@ -59,9 +59,7 @@ class Arper(persistent.Persistent):
             self.update_from_arp_scan()
             self.update_from_arp()
         if len(self.state) < config.config['arper_min_entries_to_be_valid']:
-            raise Exception(
-                f'Arper didn\'t find enough entries; only got {len(self.state)}.'
-            )
+            raise Exception(f'Arper didn\'t find enough entries; only got {len(self.state)}.')
 
     def update_from_arp_scan(self):
         network_spec = site_config.get_config().network
@@ -76,12 +74,7 @@ class Arper(persistent.Persistent):
         for line in output.split('\n'):
             ip = string_utils.extract_ip_v4(line)
             mac = string_utils.extract_mac_address(line)
-            if (
-                ip is not None
-                and mac is not None
-                and mac != 'UNKNOWN'
-                and ip != 'UNKNOWN'
-            ):
+            if ip is not None and mac is not None and mac != 'UNKNOWN' and ip != 'UNKNOWN':
                 mac = mac.lower()
                 logger.debug(f'ARPER: {mac} => {ip}')
                 self.state[mac] = ip
@@ -95,12 +88,7 @@ class Arper(persistent.Persistent):
         for line in output.split('\n'):
             ip = string_utils.extract_ip_v4(line)
             mac = string_utils.extract_mac_address(line)
-            if (
-                ip is not None
-                and mac is not None
-                and mac != 'UNKNOWN'
-                and ip != 'UNKNOWN'
-            ):
+            if ip is not None and mac is not None and mac != 'UNKNOWN' and ip != 'UNKNOWN':
                 mac = mac.lower()
                 logger.debug(f'ARPER: {mac} => {ip}')
                 self.state[mac] = ip
diff --git a/base_presence.py b/base_presence.py
index ad852f9f7ebce82d5a76c6d7f1436a78670489e7..fa035fd28f5645147dc0b5f68e2ee6dfe9589f88 100755 (executable)
--- a/base_presence.py
+++ b/base_presence.py
@@ -69,9 +69,7 @@ class PresenceDetection(object):
         self.run_location = site_config.get_location()
         logger.debug(f"run_location is {self.run_location}")
         self.weird_mac_at_cabin = False
-        self.location_ts_by_mac: Dict[
-            Location, Dict[str, datetime.datetime]
-        ] = defaultdict(dict)
+        self.location_ts_by_mac: Dict[Location, Dict[str, datetime.datetime]] = defaultdict(dict)
         self.names_by_mac: Dict[str, str] = {}
         self.dark_locations: Set[Location] = set()
         self.last_update: Optional[datetime.datetime] = None
@@ -200,9 +198,7 @@ class PresenceDetection(object):
     def where_is_person_now(self, name: Person) -> Location:
         self.maybe_update()
         if len(self.dark_locations) > 0:
-            msg = (
-                f"Can't see {self.dark_locations} right now; answer confidence impacted"
-            )
+            msg = f"Can't see {self.dark_locations} right now; answer confidence impacted"
             logger.warning(msg)
             warnings.warn(msg, stacklevel=2)
         logger.debug(f'Looking for {name}...')
diff --git a/bootstrap.py b/bootstrap.py
index 035a38eca691ca7255a4ee4bda0eee6da0813119..c89952a838dfb345acbc5cc2f2567d5c028a3287 100644 (file)
--- a/bootstrap.py
+++ b/bootstrap.py
@@ -124,9 +124,7 @@ class ImportInterceptor(importlib.abc.MetaPathFinder):
         return 'importlib' in filename or 'six.py' in filename
 
     def find_module(self, fullname, path):
-        raise Exception(
-            "This method has been deprecated since Python 3.4, please upgrade."
-        )
+        raise Exception("This method has been deprecated since Python 3.4, please upgrade.")
 
     def find_spec(self, loaded_module, path=None, target=None):
         s = stack()
@@ -230,10 +228,7 @@ def initialize(entry_point):
         # Try to figure out the name of the program entry point.  Then
         # parse configuration (based on cmdline flags, environment vars
         # etc...)
-        if (
-            '__globals__' in entry_point.__dict__
-            and '__file__' in entry_point.__globals__
-        ):
+        if '__globals__' in entry_point.__dict__ and '__file__' in entry_point.__globals__:
             config.parse(entry_point.__globals__['__file__'])
         else:
             config.parse(None)
diff --git a/cached/weather_forecast.py b/cached/weather_forecast.py
index 58f53c383cb2425f1114165cfe3c151978e69cf5..807f36d2bcd9c5770d6fa5e5bc897dcc791f8ca1 100644 (file)
--- a/cached/weather_forecast.py
+++ b/cached/weather_forecast.py
@@ -81,7 +81,8 @@ class CachedDetailedWeatherForecast(persistent.Persistent):
         last_dt = now
         dt = now
         for (day, txt) in zip(
-            forecast.find_all('b'), forecast.find_all(class_='col-sm-10 forecast-text')
+            forecast.find_all('b'),
+            forecast.find_all(class_='col-sm-10 forecast-text'),
         ):
             last_dt = dt
             try:
@@ -91,9 +92,7 @@ class CachedDetailedWeatherForecast(persistent.Persistent):
             assert dt is not None
 
             # Compute sunrise/sunset times on dt.
-            city = astral.LocationInfo(
-                "Bellevue", "USA", "US/Pacific", 47.653, -122.171
-            )
+            city = astral.LocationInfo("Bellevue", "USA", "US/Pacific", 47.653, -122.171)
             s = sun(city.observer, date=dt, tzinfo=pytz.timezone("US/Pacific"))
             sunrise = s['sunrise']
             sunset = s['sunset']
diff --git a/camera_utils.py b/camera_utils.py
index 9e7efd6dfccbd7df2d1eb8bd13eb8075a6bfe4f1..f5d295b6ed7477815fe502eed82eaea8a2c4831c 100644 (file)
--- a/camera_utils.py
+++ b/camera_utils.py
@@ -46,11 +46,7 @@ def sanity_check_image(hsv: np.ndarray) -> SanityCheckImageMetadata:
     for r in range(rows):
         for c in range(cols):
             pixel = hsv[(r, c)]
-            if (
-                is_near(pixel[0], 16)
-                and is_near(pixel[1], 117)
-                and is_near(pixel[2], 196)
-            ):
+            if is_near(pixel[0], 16) and is_near(pixel[1], 117) and is_near(pixel[2], 196):
                 weird_orange_count += 1
             elif is_near(pixel[0], 0) and is_near(pixel[1], 0):
                 hs_zero_count += 1
@@ -68,7 +64,9 @@ def fetch_camera_image_from_video_server(
     """Fetch the raw webcam image from the video server."""
     camera_name = camera_name.replace(".house", "")
     camera_name = camera_name.replace(".cabin", "")
-    url = f"http://10.0.0.226:8080/Umtxxf1uKMBniFblqeQ9KRbb6DDzN4/jpeg/GKlT2FfiSQ/{camera_name}/s.jpg"
+    url = (
+        f"http://10.0.0.226:8080/Umtxxf1uKMBniFblqeQ9KRbb6DDzN4/jpeg/GKlT2FfiSQ/{camera_name}/s.jpg"
+    )
     logger.debug(f'Fetching image from {url}')
     try:
         response = requests.get(url, stream=False, timeout=10.0)
@@ -123,9 +121,7 @@ def camera_name_to_hostname(camera_name: str) -> str:
 
 
 @decorator_utils.retry_if_none(tries=2, delay_sec=1, backoff=1.1)
-def fetch_camera_image_from_rtsp_stream(
-    camera_name: str, *, width: int = 256
-) -> Optional[bytes]:
+def fetch_camera_image_from_rtsp_stream(camera_name: str, *, width: int = 256) -> Optional[bytes]:
     """Fetch the raw webcam image straight from the webcam's RTSP stream."""
     hostname = camera_name_to_hostname(camera_name)
     stream = f"rtsp://camera:IaLaIok@{hostname}:554/live"
@@ -147,9 +143,7 @@ def fetch_camera_image_from_rtsp_stream(
             f"scale={width}:-1",
             "-",
         ]
-        with subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
-        ) as proc:
+        with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) as proc:
             out, _ = proc.communicate(timeout=10)
             return out
     except Exception as e:
@@ -161,14 +155,10 @@ def fetch_camera_image_from_rtsp_stream(
 
 
 @decorator_utils.timeout(seconds=30, use_signals=False)
-def _fetch_camera_image(
-    camera_name: str, *, width: int = 256, quality: int = 70
-) -> RawJpgHsv:
+def _fetch_camera_image(camera_name: str, *, width: int = 256, quality: int = 70) -> RawJpgHsv:
     """Fetch a webcam image given the camera name."""
     logger.debug("Trying to fetch camera image from video server")
-    raw = fetch_camera_image_from_video_server(
-        camera_name, width=width, quality=quality
-    )
+    raw = fetch_camera_image_from_video_server(camera_name, width=width, quality=quality)
     if raw is None:
         logger.debug("Reading from video server failed; trying direct RTSP stream")
         raw = fetch_camera_image_from_rtsp_stream(camera_name, width=width)
@@ -187,9 +177,7 @@ def _fetch_camera_image(
     return RawJpgHsv(None, None, None)
 
 
-def fetch_camera_image(
-    camera_name: str, *, width: int = 256, quality: int = 70
-) -> RawJpgHsv:
+def fetch_camera_image(camera_name: str, *, width: int = 256, quality: int = 70) -> RawJpgHsv:
     try:
         return _fetch_camera_image(camera_name, width=width, quality=quality)
     except exceptions.TimeoutError:
diff --git a/collect/bidict.py b/collect/bidict.py
index 8153e54446c8a4c5a36f96ba85e0b93c9afafe18..d28817453775e6f4dc35804a445dc2f6a9915bc4 100644 (file)
--- a/collect/bidict.py
+++ b/collect/bidict.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 
+
 class BiDict(dict):
     def __init__(self, *args, **kwargs):
         """
@@ -50,4 +51,5 @@ class BiDict(dict):
 
 if __name__ == '__main__':
     import doctest
+
     doctest.testmod()
diff --git a/collect/bst.py b/collect/bst.py
index 8602ce698d9c5d7970f890cfd97d97b8ccb4ffda..712683eb59ea3c38939ebf4f5a4cb31c191199f8 100644 (file)
--- a/collect/bst.py
+++ b/collect/bst.py
@@ -90,9 +90,7 @@ class BinarySearchTree(object):
             return self._find(value, node.right)
         return None
 
-    def _parent_path(
-        self, current: Optional[Node], target: Node
-    ) -> List[Optional[Node]]:
+    def _parent_path(self, current: Optional[Node], target: Node) -> List[Optional[Node]]:
         if current is None:
             return [None]
         ret: List[Optional[Node]] = [current]
@@ -575,7 +573,11 @@ class BinarySearchTree(object):
         return self.depth()
 
     def repr_traverse(
-        self, padding: str, pointer: str, node: Optional[Node], has_right_sibling: bool
+        self,
+        padding: str,
+        pointer: str,
+        node: Optional[Node],
+        has_right_sibling: bool,
     ) -> str:
         if node is not None:
             viz = f'\n{padding}{pointer}{node.value}'
@@ -590,9 +592,7 @@ class BinarySearchTree(object):
             else:
                 pointer_left = "└──"
 
-            viz += self.repr_traverse(
-                padding, pointer_left, node.left, node.right is not None
-            )
+            viz += self.repr_traverse(padding, pointer_left, node.left, node.right is not None)
             viz += self.repr_traverse(padding, pointer_right, node.right, False)
             return viz
         return ""
@@ -628,9 +628,7 @@ class BinarySearchTree(object):
         else:
             pointer_left = "├──"
 
-        ret += self.repr_traverse(
-            '', pointer_left, self.root.left, self.root.left is not None
-        )
+        ret += self.repr_traverse('', pointer_left, self.root.left, self.root.left is not None)
         ret += self.repr_traverse('', pointer_right, self.root.right, False)
         return ret
 
diff --git a/collect/shared_dict.py b/collect/shared_dict.py
index ec76138b393955d47f3850bf2a91d425ada563b1..e0a42f2c55c2fc865b0c89642d458ae26009c224 100644 (file)
--- a/collect/shared_dict.py
+++ b/collect/shared_dict.py
@@ -31,16 +31,7 @@ import pickle
 from contextlib import contextmanager
 from functools import wraps
 from multiprocessing import RLock, shared_memory
-from typing import (
-    Any,
-    Dict,
-    Generator,
-    ItemsView,
-    Iterator,
-    KeysView,
-    Optional,
-    ValuesView,
-)
+from typing import Any, Dict, Generator, ItemsView, Iterator, KeysView, Optional, ValuesView
 
 from decorator_utils import synchronized
 
@@ -94,9 +85,7 @@ class SharedDict(object):
             return shared_memory.SharedMemory(name=name, create=True, size=size_bytes)
 
     def _ensure_memory_initialization(self):
-        memory_is_empty = (
-            bytes(self.shared_memory.buf).split(SharedDict.NULL_BYTE, 1)[0] == b''
-        )
+        memory_is_empty = bytes(self.shared_memory.buf).split(SharedDict.NULL_BYTE, 1)[0] == b''
         if memory_is_empty:
             self.clear()
 
diff --git a/collect/trie.py b/collect/trie.py
index 3e4c9172fbbf3b01202f6c9ccc5b2d4ff607fcc1..70d57b1338ca8e19e4bde07e33e440a846bdc79e 100644 (file)
--- a/collect/trie.py
+++ b/collect/trie.py
@@ -11,6 +11,7 @@ class Trie(object):
     for examples.
 
     """
+
     def __init__(self):
         self.root = {}
         self.end = "~END~"
@@ -241,7 +242,14 @@ class Trie(object):
             return None
         return [x for x in node if x != self.end]
 
-    def repr_fancy(self, padding: str, pointer: str, parent: str, node: Any, has_sibling: bool):
+    def repr_fancy(
+        self,
+        padding: str,
+        pointer: str,
+        parent: str,
+        node: Any,
+        has_sibling: bool,
+    ):
         if node is None:
             return
         if node is not self.root:
@@ -328,4 +336,5 @@ class Trie(object):
 
 if __name__ == '__main__':
     import doctest
+
     doctest.testmod()
diff --git a/config.py b/config.py
index 588b7e072006d6d27cbf115e7220f9e40b6cc706..42a7044d21c6d86c0f4a793b81d7a70ed23b04e9 100644 (file)
--- a/config.py
+++ b/config.py
@@ -191,9 +191,7 @@ def augment_sys_argv_from_environment_variables():
                 if env in os.environ:
                     if not is_flag_already_in_argv(var):
                         value = os.environ[env]
-                        saved_messages.append(
-                            f'Initialized from environment: {var} = {value}'
-                        )
+                        saved_messages.append(f'Initialized from environment: {var} = {value}')
                         from string_utils import to_bool
 
                         if len(chunks) == 1 and to_bool(value):
@@ -282,9 +280,7 @@ def parse(entry_module: Optional[str]) -> Dict[str, Any]:
             raise Exception(
                 f'Encountered unrecognized config argument(s) {unknown} with --config_rejects_unrecognized_arguments enabled; halting.'
             )
-        saved_messages.append(
-            f'Config encountered unrecognized commandline arguments: {unknown}'
-        )
+        saved_messages.append(f'Config encountered unrecognized commandline arguments: {unknown}')
     sys.argv = sys.argv[:1] + unknown
 
     # Check for savefile and populate it if requested.
diff --git a/conversion_utils.py b/conversion_utils.py
index 8eaecd5bd7b8227b6cf8baeee4723e6bfa0450ed..2c00ba48b362146435a2214cdd4780550b7a91c1 100644 (file)
--- a/conversion_utils.py
+++ b/conversion_utils.py
@@ -106,9 +106,7 @@ def convert(magnitude: SupportsFloat, from_thing: str, to_thing: str) -> float:
     return _convert(magnitude, src, dst)
 
 
-def _convert(
-    magnitude: SupportsFloat, from_unit: Converter, to_unit: Converter
-) -> float:
+def _convert(magnitude: SupportsFloat, from_unit: Converter, to_unit: Converter) -> float:
     canonical = from_unit.to_canonical(magnitude)
     converted = to_unit.from_canonical(canonical)
     return float(converted)
diff --git a/dateparse/dateparse_utils.py b/dateparse/dateparse_utils.py
index bf1f10abb0e483a1998bd1035f9e50f9a6eef004..54a47366b6f6493ca7766d6ff72e52a0ac87cf54 100755 (executable)
--- a/dateparse/dateparse_utils.py
+++ b/dateparse/dateparse_utils.py
@@ -24,12 +24,7 @@ import decorator_utils
 from dateparse.dateparse_utilsLexer import dateparse_utilsLexer  # type: ignore
 from dateparse.dateparse_utilsListener import dateparse_utilsListener  # type: ignore
 from dateparse.dateparse_utilsParser import dateparse_utilsParser  # type: ignore
-from datetime_utils import (
-    TimeUnit,
-    date_to_datetime,
-    datetime_to_date,
-    n_timeunits_from_base,
-)
+from datetime_utils import TimeUnit, date_to_datetime, datetime_to_date, n_timeunits_from_base
 
 logger = logging.getLogger(__name__)
 
@@ -65,9 +60,7 @@ class RaisingErrorListener(antlr4.DiagnosticErrorListener):
     def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
         raise ParseException(msg)
 
-    def reportAmbiguity(
-        self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs
-    ):
+    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
         pass
 
     def reportAttemptingFullContext(
@@ -75,9 +68,7 @@ class RaisingErrorListener(antlr4.DiagnosticErrorListener):
     ):
         pass
 
-    def reportContextSensitivity(
-        self, recognizer, dfa, startIndex, stopIndex, prediction, configs
-    ):
+    def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
         pass
 
 
@@ -435,7 +426,14 @@ class DateParser(dateparse_utilsListener):
         micros = self.time.microsecond
 
         self.datetime = datetime.datetime(
-            year, month, day, hour, minute, second, micros, tzinfo=self.time.tzinfo
+            year,
+            month,
+            day,
+            hour,
+            minute,
+            second,
+            micros,
+            tzinfo=self.time.tzinfo,
         )
 
         # Apply residual adjustments to times here when we have a
@@ -550,9 +548,7 @@ class DateParser(dateparse_utilsListener):
             else:
                 raise ParseException(f'Invalid Unit: "{unit}"')
 
-    def exitDeltaPlusMinusExpr(
-        self, ctx: dateparse_utilsParser.DeltaPlusMinusExprContext
-    ) -> None:
+    def exitDeltaPlusMinusExpr(self, ctx: dateparse_utilsParser.DeltaPlusMinusExprContext) -> None:
         try:
             n = ctx.nth()
             if n is None:
@@ -574,17 +570,13 @@ class DateParser(dateparse_utilsListener):
         else:
             self.context['delta_unit'] = unit
 
-    def exitDeltaNextLast(
-        self, ctx: dateparse_utilsParser.DeltaNextLastContext
-    ) -> None:
+    def exitDeltaNextLast(self, ctx: dateparse_utilsParser.DeltaNextLastContext) -> None:
         try:
             txt = ctx.getText().lower()
         except Exception:
             raise ParseException(f'Bad next/last: {ctx.getText()}')
         if 'month' in self.context or 'day' in self.context or 'year' in self.context:
-            raise ParseException(
-                'Next/last expression expected to be relative to today.'
-            )
+            raise ParseException('Next/last expression expected to be relative to today.')
         if txt[:4] == 'next':
             self.context['delta_int'] = +1
             self.context['day'] = self.now_datetime.day
@@ -613,9 +605,7 @@ class DateParser(dateparse_utilsListener):
         if 'time_delta_before_after' not in self.context:
             raise ParseException(f'Bad Before/After: {ctx.getText()}')
 
-    def exitDeltaTimeFraction(
-        self, ctx: dateparse_utilsParser.DeltaTimeFractionContext
-    ) -> None:
+    def exitDeltaTimeFraction(self, ctx: dateparse_utilsParser.DeltaTimeFractionContext) -> None:
         try:
             txt = ctx.getText().lower()[:4]
             if txt == 'quar':
@@ -629,9 +619,7 @@ class DateParser(dateparse_utilsListener):
         except Exception:
             raise ParseException(f'Bad time fraction {ctx.getText()}')
 
-    def exitDeltaBeforeAfter(
-        self, ctx: dateparse_utilsParser.DeltaBeforeAfterContext
-    ) -> None:
+    def exitDeltaBeforeAfter(self, ctx: dateparse_utilsParser.DeltaBeforeAfterContext) -> None:
         try:
             txt = ctx.getText().lower()
         except Exception:
@@ -639,9 +627,7 @@ class DateParser(dateparse_utilsListener):
         else:
             self.context['delta_before_after'] = txt
 
-    def exitDeltaTimeBeforeAfter(
-        self, ctx: dateparse_utilsParser.DeltaBeforeAfterContext
-    ) -> None:
+    def exitDeltaTimeBeforeAfter(self, ctx: dateparse_utilsParser.DeltaBeforeAfterContext) -> None:
         try:
             txt = ctx.getText().lower()
         except Exception:
@@ -803,9 +789,7 @@ class DateParser(dateparse_utilsListener):
             special = ctx.specialDate().getText().lower()
             self.context['special'] = special
         except Exception:
-            raise ParseException(
-                f'Bad specialDate expression: {ctx.specialDate().getText()}'
-            )
+            raise ParseException(f'Bad specialDate expression: {ctx.specialDate().getText()}')
         try:
             mod = ctx.thisNextLast()
             if mod is not None:
@@ -889,9 +873,7 @@ class DateParser(dateparse_utilsListener):
         self.context['month'] = d.month
         self.context['day'] = d.day
 
-    def exitSpecialTimeExpr(
-        self, ctx: dateparse_utilsParser.SpecialTimeExprContext
-    ) -> None:
+    def exitSpecialTimeExpr(self, ctx: dateparse_utilsParser.SpecialTimeExprContext) -> None:
         try:
             txt = ctx.specialTime().getText().lower()
         except Exception:
@@ -916,9 +898,7 @@ class DateParser(dateparse_utilsListener):
         except Exception:
             pass
 
-    def exitTwelveHourTimeExpr(
-        self, ctx: dateparse_utilsParser.TwelveHourTimeExprContext
-    ) -> None:
+    def exitTwelveHourTimeExpr(self, ctx: dateparse_utilsParser.TwelveHourTimeExprContext) -> None:
         try:
             hour = ctx.hour().getText()
             while not hour[-1].isdigit():
diff --git a/datetime_utils.py b/datetime_utils.py
index fb859719796c5cc377e328bacf2aecf2b6bb81f6..1cee5163a22179d3c634a078433e83c53add8109 100644 (file)
--- a/datetime_utils.py
+++ b/datetime_utils.py
@@ -164,9 +164,7 @@ def time_to_datetime_today(time: datetime.time) -> datetime.datetime:
     return datetime.datetime.combine(now, time, tz)
 
 
-def date_and_time_to_datetime(
-    date: datetime.date, time: datetime.time
-) -> datetime.datetime:
+def date_and_time_to_datetime(date: datetime.date, time: datetime.time) -> datetime.datetime:
     """
     Given a date and time, merge them and return a datetime.
 
@@ -261,9 +259,7 @@ class TimeUnit(enum.IntEnum):
             return False
 
 
-def n_timeunits_from_base(
-    count: int, unit: TimeUnit, base: datetime.datetime
-) -> datetime.datetime:
+def n_timeunits_from_base(count: int, unit: TimeUnit, base: datetime.datetime) -> datetime.datetime:
     """Return a datetime that is N units before/after a base datetime.
     e.g.  3 Wednesdays from base datetime, 2 weeks from base date, 10
     years before base datetime, 13 minutes after base datetime, etc...
@@ -353,10 +349,7 @@ def n_timeunits_from_base(
             base += timedelta
             if base.year != old_year:
                 skips = holidays.US(years=base.year).keys()
-            if (
-                base.weekday() < 5
-                and datetime.date(base.year, base.month, base.day) not in skips
-            ):
+            if base.weekday() < 5 and datetime.date(base.year, base.month, base.day) not in skips:
                 count -= 1
         return base
 
@@ -825,9 +818,7 @@ def describe_timedelta_briefly(delta: datetime.timedelta) -> str:
     '1d 10m'
 
     """
-    return describe_duration_briefly(
-        int(delta.total_seconds())
-    )  # Note: drops milliseconds
+    return describe_duration_briefly(int(delta.total_seconds()))  # Note: drops milliseconds
 
 
 if __name__ == '__main__':
diff --git a/decorator_utils.py b/decorator_utils.py
index a5c5afecb34c005d16a351cc703f44dc561b567d..68a9d69633f6babe78c80153753d5c4a41c150af 100644 (file)
--- a/decorator_utils.py
+++ b/decorator_utils.py
@@ -148,9 +148,7 @@ def rate_limited(n_calls: int, *, per_period_in_seconds: float = 1.0) -> Callabl
                 logger.debug(f'@{time.time()}> calling it...')
                 ret = func(*args, **kargs)
                 last_invocation_timestamp[0] = time.time()
-                logger.debug(
-                    f'@{time.time()}> Last invocation <- {last_invocation_timestamp[0]}'
-                )
+                logger.debug(f'@{time.time()}> Last invocation <- {last_invocation_timestamp[0]}')
                 cv.notify()
             return ret
 
@@ -290,9 +288,7 @@ class _SingletonWrapper:
 
     def __call__(self, *args, **kwargs):
         """Returns a single instance of decorated class"""
-        logger.debug(
-            f"@singleton returning global instance of {self.__wrapped__.__name__}"
-        )
+        logger.debug(f"@singleton returning global instance of {self.__wrapped__.__name__}")
         if self._instance is None:
             self._instance = self.__wrapped__(*args, **kwargs)
         return self._instance
@@ -594,9 +590,7 @@ class _Timeout(object):
         self.__limit = kwargs.pop("timeout", self.__limit)
         self.__queue = multiprocessing.Queue(1)
         args = (self.__queue, self.__function) + args
-        self.__process = multiprocessing.Process(
-            target=_target, args=args, kwargs=kwargs
-        )
+        self.__process = multiprocessing.Process(target=_target, args=args, kwargs=kwargs)
         self.__process.daemon = True
         self.__process.start()
         if self.__limit is not None:
@@ -693,9 +687,7 @@ def timeout(
 
             @functools.wraps(function)
             def new_function(*args, **kwargs):
-                timeout_wrapper = _Timeout(
-                    function, timeout_exception, error_message, seconds
-                )
+                timeout_wrapper = _Timeout(function, timeout_exception, error_message, seconds)
                 return timeout_wrapper(*args, **kwargs)
 
             return new_function
diff --git a/executors.py b/executors.py
index b4cb06b4c9ad80816f0bb13e05e23863846fa8c1..2f735b7c568638f6d8419d3b4cc2476aeb3f3b9f 100644 (file)
--- a/executors.py
+++ b/executors.py
@@ -146,9 +146,7 @@ class ThreadExecutor(BaseExecutor):
         for arg in args:
             newargs.append(arg)
         start = time.time()
-        result = self._thread_pool_executor.submit(
-            self.run_local_bundle, *newargs, **kwargs
-        )
+        result = self._thread_pool_executor.submit(self.run_local_bundle, *newargs, **kwargs)
         result.add_done_callback(lambda _: self.histogram.add_item(time.time() - start))
         result.add_done_callback(lambda _: self.adjust_task_count(-1))
         return result
@@ -289,9 +287,7 @@ class RemoteExecutorStatus:
         self.start_time: float = time.time()
         self.start_per_bundle: Dict[str, Optional[float]] = defaultdict(float)
         self.end_per_bundle: Dict[str, float] = defaultdict(float)
-        self.finished_bundle_timings_per_worker: Dict[
-            RemoteWorkerRecord, List[float]
-        ] = {}
+        self.finished_bundle_timings_per_worker: Dict[RemoteWorkerRecord, List[float]] = {}
         self.in_flight_bundles_by_worker: Dict[RemoteWorkerRecord, Set[str]] = {}
         self.bundle_details_by_uuid: Dict[str, BundleDetails] = {}
         self.finished_bundle_timings: List[float] = []
@@ -306,9 +302,7 @@ class RemoteExecutorStatus:
         with self.lock:
             self.record_acquire_worker_already_locked(worker, uuid)
 
-    def record_acquire_worker_already_locked(
-        self, worker: RemoteWorkerRecord, uuid: str
-    ) -> None:
+    def record_acquire_worker_already_locked(self, worker: RemoteWorkerRecord, uuid: str) -> None:
         assert self.lock.locked()
         self.known_workers.add(worker)
         self.start_per_bundle[uuid] = None
@@ -472,9 +466,7 @@ class WeightedRandomRemoteWorkerSelectionPolicy(RemoteWorkerSelectionPolicy):
                         grabbag.append(worker)
 
         if len(grabbag) == 0:
-            logger.debug(
-                f'There are no available workers that avoid {machine_to_avoid}...'
-            )
+            logger.debug(f'There are no available workers that avoid {machine_to_avoid}...')
             for worker in self.workers:
                 if worker.count > 0:
                     for _ in range(worker.count * worker.weight):
@@ -503,9 +495,7 @@ class RoundRobinRemoteWorkerSelectionPolicy(RemoteWorkerSelectionPolicy):
         return False
 
     @overrides
-    def acquire_worker(
-        self, machine_to_avoid: str = None
-    ) -> Optional[RemoteWorkerRecord]:
+    def acquire_worker(self, machine_to_avoid: str = None) -> Optional[RemoteWorkerRecord]:
         x = self.index
         while True:
             worker = self.workers[x]
@@ -544,9 +534,7 @@ class RemoteExecutor(BaseExecutor):
             raise RemoteExecutorException(msg)
         self.policy.register_worker_pool(self.workers)
         self.cv = threading.Condition()
-        logger.debug(
-            f'Creating {self.worker_count} local threads, one per remote worker.'
-        )
+        logger.debug(f'Creating {self.worker_count} local threads, one per remote worker.')
         self._helper_executor = fut.ThreadPoolExecutor(
             thread_name_prefix="remote_executor_helper",
             max_workers=self.worker_count,
@@ -628,21 +616,15 @@ class RemoteExecutor(BaseExecutor):
                             if start_ts is not None:
                                 runtime = now - start_ts
                                 score += runtime
-                                logger.debug(
-                                    f'score[{bundle}] => {score}  # latency boost'
-                                )
+                                logger.debug(f'score[{bundle}] => {score}  # latency boost')
 
                                 if bundle.slower_than_local_p95:
                                     score += runtime / 2
-                                    logger.debug(
-                                        f'score[{bundle}] => {score}  # >worker p95'
-                                    )
+                                    logger.debug(f'score[{bundle}] => {score}  # >worker p95')
 
                                 if bundle.slower_than_global_p95:
                                     score += runtime / 4
-                                    logger.debug(
-                                        f'score[{bundle}] => {score}  # >global p95'
-                                    )
+                                    logger.debug(f'score[{bundle}] => {score}  # >global p95')
 
                             # Prefer backups of bundles that don't
                             # have backups already.
@@ -659,9 +641,7 @@ class RemoteExecutor(BaseExecutor):
                                 f'score[{bundle}] => {score}  # {backup_count} dup backup factor'
                             )
 
-                            if score != 0 and (
-                                best_score is None or score > best_score
-                            ):
+                            if score != 0 and (best_score is None or score > best_score):
                                 bundle_to_backup = bundle
                                 assert bundle is not None
                                 assert bundle.backup_bundles is not None
@@ -686,14 +666,10 @@ class RemoteExecutor(BaseExecutor):
     def is_worker_available(self) -> bool:
         return self.policy.is_worker_available()
 
-    def acquire_worker(
-        self, machine_to_avoid: str = None
-    ) -> Optional[RemoteWorkerRecord]:
+    def acquire_worker(self, machine_to_avoid: str = None) -> Optional[RemoteWorkerRecord]:
         return self.policy.acquire_worker(machine_to_avoid)
 
-    def find_available_worker_or_block(
-        self, machine_to_avoid: str = None
-    ) -> RemoteWorkerRecord:
+    def find_available_worker_or_block(self, machine_to_avoid: str = None) -> RemoteWorkerRecord:
         with self.cv:
             while not self.is_worker_available():
                 self.cv.wait()
@@ -757,9 +733,7 @@ class RemoteExecutor(BaseExecutor):
             try:
                 return self.process_work_result(bundle)
             except Exception as e:
-                logger.warning(
-                    f'{bundle}: bundle says it\'s cancelled upfront but no results?!'
-                )
+                logger.warning(f'{bundle}: bundle says it\'s cancelled upfront but no results?!')
                 self.release_worker(bundle)
                 if is_original:
                     # Weird.  We are the original owner of this
@@ -788,9 +762,7 @@ class RemoteExecutor(BaseExecutor):
         # Send input code / data to worker machine if it's not local.
         if hostname not in machine:
             try:
-                cmd = (
-                    f'{SCP} {bundle.code_file} {username}@{machine}:{bundle.code_file}'
-                )
+                cmd = f'{SCP} {bundle.code_file} {username}@{machine}:{bundle.code_file}'
                 start_ts = time.time()
                 logger.info(f"{bundle}: Copying work to {worker} via {cmd}.")
                 run_silently(cmd)
@@ -831,9 +803,7 @@ class RemoteExecutor(BaseExecutor):
         logger.debug(f'{bundle}: Executing {cmd} in the background to kick off work...')
         p = cmd_in_background(cmd, silent=True)
         bundle.pid = p.pid
-        logger.debug(
-            f'{bundle}: Local ssh process pid={p.pid}; remote worker is {machine}.'
-        )
+        logger.debug(f'{bundle}: Local ssh process pid={p.pid}; remote worker is {machine}.')
         return self.wait_for_process(p, bundle, 0)
 
     def wait_for_process(
@@ -858,9 +828,7 @@ class RemoteExecutor(BaseExecutor):
                 p.wait(timeout=0.25)
             except subprocess.TimeoutExpired:
                 if self.check_if_cancelled(bundle):
-                    logger.info(
-                        f'{bundle}: looks like another worker finished bundle...'
-                    )
+                    logger.info(f'{bundle}: looks like another worker finished bundle...')
                     break
             else:
                 logger.info(f"{bundle}: pid {pid} ({machine}) is finished!")
@@ -928,12 +896,9 @@ class RemoteExecutor(BaseExecutor):
                             break
 
                     run_silently(
-                        f'{SSH} {username}@{machine}'
-                        f' "/bin/rm -f {code_file} {result_file}"'
-                    )
-                    logger.debug(
-                        f'Fetching results back took {time.time() - bundle.end_ts:.1f}s.'
+                        f'{SSH} {username}@{machine}' f' "/bin/rm -f {code_file} {result_file}"'
                     )
+                    logger.debug(f'Fetching results back took {time.time() - bundle.end_ts:.1f}s.')
                 dur = bundle.end_ts - bundle.start_ts
                 self.histogram.add_item(dur)
 
@@ -968,9 +933,7 @@ class RemoteExecutor(BaseExecutor):
             # backup.
             if bundle.backup_bundles is not None:
                 for backup in bundle.backup_bundles:
-                    logger.debug(
-                        f'{bundle}: Notifying backup {backup.uuid} that it\'s cancelled'
-                    )
+                    logger.debug(f'{bundle}: Notifying backup {backup.uuid} that it\'s cancelled')
                     backup.is_cancelled.set()
 
         # This is a backup job and, by now, we have already fetched
@@ -985,9 +948,7 @@ class RemoteExecutor(BaseExecutor):
             if not was_cancelled:
                 orig_bundle = bundle.src_bundle
                 assert orig_bundle is not None
-                logger.debug(
-                    f'{bundle}: Notifying original {orig_bundle.uuid} we beat them to it.'
-                )
+                logger.debug(f'{bundle}: Notifying original {orig_bundle.uuid} we beat them to it.')
                 orig_bundle.is_cancelled.set()
         self.release_worker(bundle, was_cancelled=was_cancelled)
         return result
@@ -1072,9 +1033,7 @@ class RemoteExecutor(BaseExecutor):
         # they will move the result_file to this machine and let
         # the original pick them up and unpickle them.
 
-    def emergency_retry_nasty_bundle(
-        self, bundle: BundleDetails
-    ) -> Optional[fut.Future]:
+    def emergency_retry_nasty_bundle(self, bundle: BundleDetails) -> Optional[fut.Future]:
         is_original = bundle.src_bundle is None
         bundle.worker = None
         avoid_last_machine = bundle.machine
@@ -1136,9 +1095,7 @@ class DefaultExecutors(object):
     def ping(self, host) -> bool:
         logger.debug(f'RUN> ping -c 1 {host}')
         try:
-            x = cmd_with_timeout(
-                f'ping -c 1 {host} >/dev/null 2>/dev/null', timeout_seconds=1.0
-            )
+            x = cmd_with_timeout(f'ping -c 1 {host} >/dev/null 2>/dev/null', timeout_seconds=1.0)
             return x == 0
         except Exception:
             return False
diff --git a/file_utils.py b/file_utils.py
index deda45e033f6a5adb65a4b1d7725e2b346030797..6bcfc75db47efa6d2da2327cdd42284c589901d4 100644 (file)
--- a/file_utils.py
+++ b/file_utils.py
@@ -347,9 +347,7 @@ def set_file_raw_atime_and_mtime(filename: str, ts: float = None):
         os.utime(filename, None)
 
 
-def convert_file_timestamp_to_datetime(
-    filename: str, producer
-) -> Optional[datetime.datetime]:
+def convert_file_timestamp_to_datetime(filename: str, producer) -> Optional[datetime.datetime]:
     ts = producer(filename)
     if ts is not None:
         return datetime.datetime.fromtimestamp(ts)
@@ -389,9 +387,7 @@ def get_file_mtime_age_seconds(filename: str) -> Optional[int]:
     return get_file_timestamp_age_seconds(filename, lambda x: x.st_mtime)
 
 
-def get_file_timestamp_timedelta(
-    filename: str, extractor
-) -> Optional[datetime.timedelta]:
+def get_file_timestamp_timedelta(filename: str, extractor) -> Optional[datetime.timedelta]:
     age = get_file_timestamp_age_seconds(filename, extractor)
     if age is not None:
         return datetime.timedelta(seconds=float(age))
diff --git a/lockfile.py b/lockfile.py
index 38134b20d02f00f8d419b5ddfef669c5002a4abf..2d429147937d01b37a057910c50004374917af0f 100644 (file)
--- a/lockfile.py
+++ b/lockfile.py
@@ -129,10 +129,7 @@ class LockFile(object):
         if self.locktime:
             ts = datetime.datetime.now().timestamp()
             duration = ts - self.locktime
-            if (
-                duration
-                >= config.config['lockfile_held_duration_warning_threshold_sec']
-            ):
+            if duration >= config.config['lockfile_held_duration_warning_threshold_sec']:
                 str_duration = datetime_utils.describe_duration_briefly(duration)
                 msg = f'Held {self.lockfile} for {str_duration}'
                 logger.warning(msg)
diff --git a/logging_utils.py b/logging_utils.py
index 8875b2fcb3d833f900c1107e7203abc27a49f54c..5dbc55260ec1970151433e86858e1e3e404dd0c9 100644 (file)
--- a/logging_utils.py
+++ b/logging_utils.py
@@ -257,9 +257,7 @@ class DynamicPerScopeLoggingLevelFilter(logging.Filter):
         per_scope_logging_levels: str,
     ) -> None:
         super().__init__()
-        self.valid_levels = set(
-            ['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
-        )
+        self.valid_levels = set(['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])
         self.default_logging_level = default_logging_level
         self.level_by_scope = {}
         if per_scope_logging_levels is not None:
@@ -286,9 +284,9 @@ class DynamicPerScopeLoggingLevelFilter(logging.Filter):
                         file=sys.stderr,
                     )
                     continue
-                self.level_by_scope[
-                    scope
-                ] = DynamicPerScopeLoggingLevelFilter.level_name_to_level(level)
+                self.level_by_scope[scope] = DynamicPerScopeLoggingLevelFilter.level_name_to_level(
+                    level
+                )
 
     @overrides
     def filter(self, record: logging.LogRecord) -> bool:
@@ -386,9 +384,7 @@ class MillisecondAwareFormatter(logging.Formatter):
 
     @overrides
     def formatTime(self, record, datefmt=None):
-        ct = MillisecondAwareFormatter.converter(
-            record.created, pytz.timezone("US/Pacific")
-        )
+        ct = MillisecondAwareFormatter.converter(record.created, pytz.timezone("US/Pacific"))
         if datefmt:
             s = ct.strftime(datefmt)
         else:
@@ -398,36 +394,29 @@ class MillisecondAwareFormatter(logging.Formatter):
 
 
 def log_about_logging(
-    logger, default_logging_level, preexisting_handlers_count, fmt, facility_name
+    logger,
+    default_logging_level,
+    preexisting_handlers_count,
+    fmt,
+    facility_name,
 ):
-    level_name = logging._levelToName.get(
-        default_logging_level, str(default_logging_level)
-    )
+    level_name = logging._levelToName.get(default_logging_level, str(default_logging_level))
     logger.debug(f'Initialized global logging; default logging level is {level_name}.')
-    if (
-        config.config['logging_clear_preexisting_handlers']
-        and preexisting_handlers_count > 0
-    ):
+    if config.config['logging_clear_preexisting_handlers'] and preexisting_handlers_count > 0:
         msg = f'Logging cleared {preexisting_handlers_count} global handlers (--logging_clear_preexisting_handlers)'
         logger.warning(msg)
     logger.debug(f'Logging format specification is "{fmt}"')
     if config.config['logging_debug_threads']:
-        logger.debug(
-            '...Logging format spec captures tid/pid (--logging_debug_threads)'
-        )
+        logger.debug('...Logging format spec captures tid/pid (--logging_debug_threads)')
     if config.config['logging_debug_modules']:
         logger.debug(
             '...Logging format spec captures files/functions/lineno (--logging_debug_modules)'
         )
     if config.config['logging_syslog']:
-        logger.debug(
-            f'Logging to syslog as {facility_name} with priority mapping based on level'
-        )
+        logger.debug(f'Logging to syslog as {facility_name} with priority mapping based on level')
     if config.config['logging_filename']:
         logger.debug(f'Logging to filename {config.config["logging_filename"]}')
-        logger.debug(
-            f'...with {config.config["logging_filename_maxsize"]} bytes max file size.'
-        )
+        logger.debug(f'...with {config.config["logging_filename_maxsize"]} bytes max file size.')
         logger.debug(
             f'...and {config.config["logging_filename_count"]} rotating backup file count.'
         )
@@ -487,9 +476,7 @@ def initialize_logging(logger=None) -> logging.Logger:
     handler: Optional[logging.Handler] = None
 
     # Global default logging level (--logging_level)
-    default_logging_level = getattr(
-        logging, config.config['logging_level'].upper(), None
-    )
+    default_logging_level = getattr(logging, config.config['logging_level'].upper(), None)
     if not isinstance(default_logging_level, int):
         raise ValueError('Invalid level: %s' % config.config['logging_level'])
 
diff --git a/logical_search.py b/logical_search.py
index 4295aa0892fd0a67e8af778aa92f92a30d58e436..76c2f86264be3fc49960166fe7a1b702f90e6c4b 100644 (file)
--- a/logical_search.py
+++ b/logical_search.py
@@ -168,9 +168,7 @@ class Corpus(object):
     def invert_docid_set(self, original: Set[str]) -> Set[str]:
         """Invert a set of docids."""
 
-        return set(
-            [docid for docid in self.documents_by_docid.keys() if docid not in original]
-        )
+        return set([docid for docid in self.documents_by_docid.keys() if docid not in original])
 
     def get_doc(self, docid: str) -> Optional[Document]:
         """Given a docid, retrieve the previously added Document."""
@@ -250,9 +248,7 @@ class Corpus(object):
                     operation = Operation.from_token(token)
                     operand_count = operation.num_operands()
                     if len(node_stack) < operand_count:
-                        raise ParseError(
-                            f"Incorrect number of operations for {operation}"
-                        )
+                        raise ParseError(f"Incorrect number of operations for {operation}")
                     for _ in range(operation.num_operands()):
                         args.append(node_stack.pop())
                     node = Node(corpus, operation, args)
@@ -342,9 +338,7 @@ class Node(object):
                         try:
                             key, value = tag.split(":")
                         except ValueError as v:
-                            raise ParseError(
-                                f'Invalid key:value syntax at "{tag}"'
-                            ) from v
+                            raise ParseError(f'Invalid key:value syntax at "{tag}"') from v
                         if value == "*":
                             r = self.corpus.get_docids_with_property(key)
                         else:
diff --git a/ml/model_trainer.py b/ml/model_trainer.py
index a37885ce3e92f20d11bb61f4268407689829bca4..12ccb3c6c0508e61081d6791b12f9a8f5a5571f2 100644 (file)
--- a/ml/model_trainer.py
+++ b/ml/model_trainer.py
@@ -28,7 +28,8 @@ from decorator_utils import timed
 logger = logging.getLogger(__file__)
 
 parser = config.add_commandline_args(
-    f"ML Model Trainer ({__file__})", "Arguments related to training an ML model"
+    f"ML Model Trainer ({__file__})",
+    "Arguments related to training an ML model",
 )
 parser.add_argument(
     "--ml_trainer_quiet",
@@ -217,17 +218,12 @@ class TrainingBlueprint(ABC):
                 try:
                     (key, value) = line.split(self.spec.key_value_delimiter)
                 except Exception:
-                    logger.debug(
-                        f"WARNING: bad line in file {filename} '{line}', skipped"
-                    )
+                    logger.debug(f"WARNING: bad line in file {filename} '{line}', skipped")
                     continue
 
                 key = key.strip()
                 value = value.strip()
-                if (
-                    self.spec.features_to_skip is not None
-                    and key in self.spec.features_to_skip
-                ):
+                if self.spec.features_to_skip is not None and key in self.spec.features_to_skip:
                     logger.debug(f"Skipping feature {key}")
                     continue
 
@@ -321,9 +317,7 @@ class TrainingBlueprint(ABC):
 
     # Note: children should implement.  Consider using @parallelize.
     @abstractmethod
-    def train_model(
-        self, parameters, X_train_scaled: np.ndarray, y_train: np.ndarray
-    ) -> Any:
+    def train_model(self, parameters, X_train_scaled: np.ndarray, y_train: np.ndarray) -> Any:
         pass
 
     def evaluate_model(
@@ -368,8 +362,7 @@ Testing set score: {test_score:.2f}%"""
                 self.spec.persist_percentage_threshold is not None
                 and test_score > self.spec.persist_percentage_threshold
             ) or (
-                not self.spec.quiet
-                and input_utils.yn_response("Write the model? [y,n]: ") == "y"
+                not self.spec.quiet and input_utils.yn_response("Write the model? [y,n]: ") == "y"
             ):
                 scaler_filename = f"{self.spec.basename}_scaler.sav"
                 with open(scaler_filename, "wb") as fb:
diff --git a/persistent.py b/persistent.py
index 16f51c04160e8b05f7de83894b13d06cf11bc8e4..119931b8ccba607ccc48321ac6f3d6dd3dd5b791 100644 (file)
--- a/persistent.py
+++ b/persistent.py
@@ -150,20 +150,15 @@ class persistent_autoloaded_singleton(object):
                 logger.debug(f'Attempting to instantiate {cls.__name__} directly.')
                 self.instance = cls(*args, **kwargs)
             else:
-                logger.debug(
-                    f'Class {cls.__name__} was loaded from persisted state successfully.'
-                )
+                logger.debug(f'Class {cls.__name__} was loaded from persisted state successfully.')
                 was_loaded = True
 
             assert self.instance is not None
 
             if self.persist_at_shutdown is PersistAtShutdown.ALWAYS or (
-                not was_loaded
-                and self.persist_at_shutdown is PersistAtShutdown.IF_NOT_LOADED
+                not was_loaded and self.persist_at_shutdown is PersistAtShutdown.IF_NOT_LOADED
             ):
-                logger.debug(
-                    'Scheduling a deferred call to save at process shutdown time.'
-                )
+                logger.debug('Scheduling a deferred call to save at process shutdown time.')
                 atexit.register(self.instance.save)
             return self.instance
 
diff --git a/smart_home/lights.py b/smart_home/lights.py
index 240e7da84412f089702b7b50bd4ce5b5080ca0eb..80bfffa9db61b1639d5ff19aa9a12df387bdb90d 100644 (file)
--- a/smart_home/lights.py
+++ b/smart_home/lights.py
@@ -124,15 +124,11 @@ class GoogleLight(BaseLight):
 
     @overrides
     def turn_on(self) -> bool:
-        return GoogleLight.parse_google_response(
-            ask_google(f"turn {self.goog_name()} on")
-        )
+        return GoogleLight.parse_google_response(ask_google(f"turn {self.goog_name()} on"))
 
     @overrides
     def turn_off(self) -> bool:
-        return GoogleLight.parse_google_response(
-            ask_google(f"turn {self.goog_name()} off")
-        )
+        return GoogleLight.parse_google_response(ask_google(f"turn {self.goog_name()} off"))
 
     @overrides
     def status(self) -> str:
@@ -187,9 +183,7 @@ class GoogleLight(BaseLight):
 
     @overrides
     def make_color(self, color: str) -> bool:
-        return GoogleLight.parse_google_response(
-            ask_google(f"make {self.goog_name()} {color}")
-        )
+        return GoogleLight.parse_google_response(ask_google(f"make {self.goog_name()} {color}"))
 
 
 class TuyaLight(BaseLight):
diff --git a/smart_home/outlets.py b/smart_home/outlets.py
index d29fc4adb44335dce5772a504b539a2b2803faa9..500ea05372dd200444ba3268427b1d6f814850c9 100644 (file)
--- a/smart_home/outlets.py
+++ b/smart_home/outlets.py
@@ -221,15 +221,11 @@ class GoogleOutlet(BaseOutlet):
 
     @overrides
     def turn_on(self) -> bool:
-        return GoogleOutlet.parse_google_response(
-            ask_google(f'turn {self.goog_name()} on')
-        )
+        return GoogleOutlet.parse_google_response(ask_google(f'turn {self.goog_name()} on'))
 
     @overrides
     def turn_off(self) -> bool:
-        return GoogleOutlet.parse_google_response(
-            ask_google(f'turn {self.goog_name()} off')
-        )
+        return GoogleOutlet.parse_google_response(ask_google(f'turn {self.goog_name()} off'))
 
     @overrides
     def is_on(self) -> bool:
@@ -258,9 +254,7 @@ class MerossWrapper(object):
     def __init__(self):
         self.loop = asyncio.get_event_loop()
         self.email = os.environ.get('MEROSS_EMAIL') or scott_secrets.MEROSS_EMAIL
-        self.password = (
-            os.environ.get('MEROSS_PASSWORD') or scott_secrets.MEROSS_PASSWORD
-        )
+        self.password = os.environ.get('MEROSS_PASSWORD') or scott_secrets.MEROSS_PASSWORD
         self.devices = self.loop.run_until_complete(self.find_meross_devices())
         atexit.register(self.loop.close)
 
diff --git a/smart_home/thermometers.py b/smart_home/thermometers.py
index fe5eed18d28ef2851f9bbc6589ff528bfd2060f5..dff84f6250dfb410f11ed6dd81ef42d87fd54148 100644 (file)
--- a/smart_home/thermometers.py
+++ b/smart_home/thermometers.py
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 
 import logging
-from typing import Optional
 import urllib.request
+from typing import Optional
 
 logger = logging.getLogger()
 
@@ -21,9 +21,7 @@ class ThermometerRegistry(object):
             'cabin_hottub': ('192.168.0.107', 'hottub_temp'),
         }
 
-    def read_temperature(
-            self, location: str, *, convert_to_fahrenheit=False
-    ) -> Optional[float]:
+    def read_temperature(self, location: str, *, convert_to_fahrenheit=False) -> Optional[float]:
         record = self.thermometers.get(location, None)
         if record is None:
             logger.error(
@@ -37,7 +35,7 @@ class ThermometerRegistry(object):
             temp = www.read().decode('utf-8')
             temp = float(temp)
             if convert_to_fahrenheit:
-                temp *= (9/5)
+                temp *= 9 / 5
                 temp += 32.0
                 temp = round(temp)
         except Exception as e:
diff --git a/state_tracker.py b/state_tracker.py
index 12b94aec39c37b084495d92bbc07b386d0574610..e59231560e900305eaadab380746e5b671062242 100644 (file)
--- a/state_tracker.py
+++ b/state_tracker.py
@@ -177,9 +177,7 @@ class WaitableAutomaticStateTracker(AutomaticStateTracker):
         override_sleep_delay: Optional[float] = None,
     ) -> None:
         self._something_changed = threading.Event()
-        super().__init__(
-            update_ids_to_update_secs, override_sleep_delay=override_sleep_delay
-        )
+        super().__init__(update_ids_to_update_secs, override_sleep_delay=override_sleep_delay)
 
     def something_changed(self):
         self._something_changed.set()
diff --git a/string_utils.py b/string_utils.py
index bae59068b9063cece6fe401f649401e07efa138d..6f3cc90ed46f5c238b0887848c1cf7504ec3bcc0 100644 (file)
--- a/string_utils.py
+++ b/string_utils.py
@@ -72,9 +72,7 @@ URLS_RE = re.compile(r"({})".format(URLS_RAW_STRING), re.IGNORECASE)
 
 ESCAPED_AT_SIGN = re.compile(r'(?!"[^"]*)@+(?=[^"]*")|\\@')
 
-EMAILS_RAW_STRING = (
-    r"[a-zA-Z\d._\+\-'`!%#$&*/=\?\^\{\}\|~\\]+@[a-z\d-]+\.?[a-z\d-]+\.[a-z]{2,4}"
-)
+EMAILS_RAW_STRING = r"[a-zA-Z\d._\+\-'`!%#$&*/=\?\^\{\}\|~\\]+@[a-z\d-]+\.?[a-z\d-]+\.[a-z]{2,4}"
 
 EMAIL_RE = re.compile(r"^{}$".format(EMAILS_RAW_STRING))
 
@@ -84,13 +82,9 @@ CAMEL_CASE_TEST_RE = re.compile(r"^[a-zA-Z]*([a-z]+[A-Z]+|[A-Z]+[a-z]+)[a-zA-Z\d
 
 CAMEL_CASE_REPLACE_RE = re.compile(r"([a-z]|[A-Z]+)(?=[A-Z])")
 
-SNAKE_CASE_TEST_RE = re.compile(
-    r"^([a-z]+\d*_[a-z\d_]*|_+[a-z\d]+[a-z\d_]*)$", re.IGNORECASE
-)
+SNAKE_CASE_TEST_RE = re.compile(r"^([a-z]+\d*_[a-z\d_]*|_+[a-z\d]+[a-z\d_]*)$", re.IGNORECASE)
 
-SNAKE_CASE_TEST_DASH_RE = re.compile(
-    r"([a-z]+\d*-[a-z\d-]*|-+[a-z\d]+[a-z\d-]*)$", re.IGNORECASE
-)
+SNAKE_CASE_TEST_DASH_RE = re.compile(r"([a-z]+\d*-[a-z\d-]*|-+[a-z\d]+[a-z\d-]*)$", re.IGNORECASE)
 
 SNAKE_CASE_REPLACE_RE = re.compile(r"(_)([a-z\d])")
 
@@ -107,9 +101,7 @@ CREDIT_CARDS = {
 
 JSON_WRAPPER_RE = re.compile(r"^\s*[\[{]\s*(.*)\s*[\}\]]\s*$", re.MULTILINE | re.DOTALL)
 
-UUID_RE = re.compile(
-    r"^[a-f\d]{8}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{12}$", re.IGNORECASE
-)
+UUID_RE = re.compile(r"^[a-f\d]{8}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{12}$", re.IGNORECASE)
 
 UUID_HEX_OK_RE = re.compile(
     r"^[a-f\d]{8}-?[a-f\d]{4}-?[a-f\d]{4}-?[a-f\d]{4}-?[a-f\d]{12}$",
@@ -126,9 +118,7 @@ ANYWHERE_IP_V6_RE = re.compile(r"([a-z\d]{0,4}:){7}[a-z\d]{0,4}", re.IGNORECASE)
 
 MAC_ADDRESS_RE = re.compile(r"^([0-9A-F]{2}[:-]){5}([0-9A-F]{2})$", re.IGNORECASE)
 
-ANYWHERE_MAC_ADDRESS_RE = re.compile(
-    r"([0-9A-F]{2}[:-]){5}([0-9A-F]{2})", re.IGNORECASE
-)
+ANYWHERE_MAC_ADDRESS_RE = re.compile(r"([0-9A-F]{2}[:-]){5}([0-9A-F]{2})", re.IGNORECASE)
 
 WORDS_COUNT_RE = re.compile(r"\W*[^\W_]+\W*", re.IGNORECASE | re.MULTILINE | re.UNICODE)
 
@@ -418,9 +408,7 @@ def add_thousands_separator(in_str: str, *, separator_char=',', places=3) -> str
     if isinstance(in_str, numbers.Number):
         in_str = f'{in_str}'
     if is_number(in_str):
-        return _add_thousands_separator(
-            in_str, separator_char=separator_char, places=places
-        )
+        return _add_thousands_separator(in_str, separator_char=separator_char, places=places)
     raise ValueError(in_str)
 
 
@@ -1368,11 +1356,13 @@ def make_contractions(txt: str) -> str:
 
     # Special cases: can't, shan't and won't.
     txt = re.sub(r'\b(can)\s*no(t)\b', r"\1'\2", txt, count=0, flags=re.IGNORECASE)
+    txt = re.sub(r'\b(sha)ll\s*(n)o(t)\b', r"\1\2'\3", txt, count=0, flags=re.IGNORECASE)
     txt = re.sub(
-        r'\b(sha)ll\s*(n)o(t)\b', r"\1\2'\3", txt, count=0, flags=re.IGNORECASE
-    )
-    txt = re.sub(
-        r'\b(w)ill\s*(n)(o)(t)\b', r"\1\3\2'\4", txt, count=0, flags=re.IGNORECASE
+        r'\b(w)ill\s*(n)(o)(t)\b',
+        r"\1\3\2'\4",
+        txt,
+        count=0,
+        flags=re.IGNORECASE,
     )
 
     for first_list, second_list in first_second:
@@ -1590,9 +1580,7 @@ def chunk(txt: str, chunk_size):
         yield txt[x : x + chunk_size]
 
 
-def to_bitstring(
-    txt: str, *, delimiter='', encoding='utf-8', errors='surrogatepass'
-) -> str:
+def to_bitstring(txt: str, *, delimiter='', encoding='utf-8', errors='surrogatepass') -> str:
     """Encode txt and then chop it into bytes.  Note: only bitstrings
     with delimiter='' are interpretable by from_bitstring.
 
diff --git a/tests/centcount_test.py b/tests/centcount_test.py
index 3122b984c4459e046068ee7adbbb9cffcea819dc..c4b3b0c5bd22654aa63d2890b89e86c779ca61b6 100755 (executable)
--- a/tests/centcount_test.py
+++ b/tests/centcount_test.py
@@ -2,12 +2,11 @@
 
 import unittest
 
-from type.centcount import CentCount
 import unittest_utils as uu
+from type.centcount import CentCount
 
 
 class TestCentCount(unittest.TestCase):
-
     def test_basic_utility(self):
         amount = CentCount(1.45)
         another = CentCount.parse("USD 1.45")
diff --git a/tests/dateparse_utils_test.py b/tests/dateparse_utils_test.py
index 3b3b80282932c01a18aa60f5ff7e8419aa35c223..42df954c7749ed0f45e5b0ea3d17379e9ff29cf4 100755 (executable)
--- a/tests/dateparse_utils_test.py
+++ b/tests/dateparse_utils_test.py
@@ -10,184 +10,135 @@ import pytz
 import dateparse.dateparse_utils as du
 import unittest_utils as uu
 
-
 parsable_expressions = [
-    ('today',
-     datetime.datetime(2021, 7, 2)),
-    ('tomorrow',
-     datetime.datetime(2021, 7, 3)),
-    ('yesterday',
-     datetime.datetime(2021, 7, 1)),
-    ('21:30',
-     datetime.datetime(2021, 7, 2, 21, 30, 0, 0)),
-    ('12:01am',
-     datetime.datetime(2021, 7, 2, 0, 1, 0, 0)),
-    ('12:02p',
-     datetime.datetime(2021, 7, 2, 12, 2, 0, 0)),
-    ('0:03',
-     datetime.datetime(2021, 7, 2, 0, 3, 0, 0)),
-    ('last wednesday',
-     datetime.datetime(2021, 6, 30)),
-    ('this wed',
-     datetime.datetime(2021, 7, 7)),
-    ('next wed',
-     datetime.datetime(2021, 7, 14)),
-    ('this coming tues',
-     datetime.datetime(2021, 7, 6)),
-    ('this past monday',
-     datetime.datetime(2021, 6, 28)),
-    ('4 days ago',
-     datetime.datetime(2021, 6, 28)),
-    ('4 mondays ago',
-     datetime.datetime(2021, 6, 7)),
-    ('4 months ago',
-     datetime.datetime(2021, 3, 2)),
-    ('3 days back',
-     datetime.datetime(2021, 6, 29)),
-    ('13 weeks from now',
-     datetime.datetime(2021, 10, 1)),
-    ('1 year from now',
-     datetime.datetime(2022, 7, 2)),
-    ('4 weeks from now',
-     datetime.datetime(2021, 7, 30)),
-    ('3 saturdays ago',
-     datetime.datetime(2021, 6, 12)),
-    ('4 months from today',
-     datetime.datetime(2021, 11, 2)),
-    ('4 years from yesterday',
-     datetime.datetime(2025, 7, 1)),
-    ('4 weeks from tomorrow',
-     datetime.datetime(2021, 7, 31)),
-    ('april 15, 2005',
-     datetime.datetime(2005, 4, 15)),
-    ('april 14',
-     datetime.datetime(2021, 4, 14)),
-    ('9:30am on last wednesday',
-     datetime.datetime(2021, 6, 30, 9, 30)),
-    ('2005/apr/15',
-     datetime.datetime(2005, 4, 15)),
-    ('2005 apr 15',
-     datetime.datetime(2005, 4, 15)),
-    ('the 1st wednesday in may',
-     datetime.datetime(2021, 5, 5)),
-    ('last sun of june',
-     datetime.datetime(2021, 6, 27)),
-    ('this Easter',
-     datetime.datetime(2021, 4, 4)),
-    ('last christmas',
-     datetime.datetime(2020, 12, 25)),
-    ('last Xmas',
-     datetime.datetime(2020, 12, 25)),
-    ('xmas, 1999',
-     datetime.datetime(1999, 12, 25)),
-    ('next mlk day',
-     datetime.datetime(2022, 1, 17)),
-    ('Halloween, 2020',
-     datetime.datetime(2020, 10, 31)),
-    ('5 work days after independence day',
-     datetime.datetime(2021, 7, 12)),
-    ('50 working days from last wed',
-     datetime.datetime(2021, 9, 10)),
-    ('25 working days before columbus day',
-     datetime.datetime(2021, 9, 3)),
-    ('today +1 week',
-     datetime.datetime(2021, 7, 9)),
-    ('sunday -3 weeks',
-     datetime.datetime(2021, 6, 13)),
-    ('4 weeks before xmas, 1999',
-     datetime.datetime(1999, 11, 27)),
-    ('3 days before new years eve, 2000',
-     datetime.datetime(2000, 12, 28)),
-    ('july 4th',
-     datetime.datetime(2021, 7, 4)),
-    ('the ides of march',
-     datetime.datetime(2021, 3, 15)),
-    ('the nones of april',
-     datetime.datetime(2021, 4, 5)),
-    ('the kalends of may',
-     datetime.datetime(2021, 5, 1)),
-    ('9/11/2001',
-     datetime.datetime(2001, 9, 11)),
-    ('4 sundays before veterans\' day',
-     datetime.datetime(2021, 10, 17)),
-    ('xmas eve',
-     datetime.datetime(2021, 12, 24)),
-    ('this friday at 5pm',
-     datetime.datetime(2021, 7, 9, 17, 0, 0)),
-    ('presidents day',
-     datetime.datetime(2021, 2, 15)),
-    ('memorial day, 1921',
-     datetime.datetime(1921, 5, 30)),
-    ('today -4 wednesdays',
-     datetime.datetime(2021, 6, 9)),
-    ('thanksgiving',
-     datetime.datetime(2021, 11, 25)),
-    ('2 sun in jun',
-     datetime.datetime(2021, 6, 13)),
-    ('easter -40 days',
-     datetime.datetime(2021, 2, 23)),
-    ('easter +39 days',
-     datetime.datetime(2021, 5, 13)),
-    ('1st tuesday in nov, 2024',
-     datetime.datetime(2024, 11, 5)),
-    ('2 days before last xmas at 3:14:15.92a',
-     datetime.datetime(2020, 12, 23, 3, 14, 15, 92)),
-    ('3 weeks after xmas, 1995 at midday',
-     datetime.datetime(1996, 1, 15, 12, 0, 0)),
-    ('4 months before easter, 1992 at midnight',
-     datetime.datetime(1991, 12, 19)),
-    ('5 months before halloween, 1995 at noon',
-     datetime.datetime(1995, 5, 31, 12)),
-    ('4 days before last wednesday',
-     datetime.datetime(2021, 6, 26)),
-    ('44 months after today',
-     datetime.datetime(2025, 3, 2)),
-    ('44 years before today',
-     datetime.datetime(1977, 7, 2)),
-    ('44 weeks ago',
-     datetime.datetime(2020, 8, 28)),
-    ('15 minutes to 3am',
-     datetime.datetime(2021, 7, 2, 2, 45)),
-    ('quarter past 4pm',
-     datetime.datetime(2021, 7, 2, 16, 15)),
-    ('half past 9',
-     datetime.datetime(2021, 7, 2, 9, 30)),
-    ('4 seconds to midnight',
-     datetime.datetime(2021, 7, 1, 23, 59, 56)),
-    ('4 seconds to midnight, tomorrow',
-     datetime.datetime(2021, 7, 2, 23, 59, 56)),
-    ('2021/apr/15T21:30:44.55',
-     datetime.datetime(2021, 4, 15, 21, 30, 44, 55)),
-    ('2021/apr/15 at 21:30:44.55',
-     datetime.datetime(2021, 4, 15, 21, 30, 44, 55)),
-    ('2021/4/15 at 21:30:44.55',
-     datetime.datetime(2021, 4, 15, 21, 30, 44, 55)),
-    ('2021/04/15 at 21:30:44.55',
-     datetime.datetime(2021, 4, 15, 21, 30, 44, 55)),
-    ('2021/04/15 at 21:30:44.55Z',
-     datetime.datetime(2021, 4, 15, 21, 30, 44, 55,
-                       tzinfo=pytz.timezone('UTC'))),
-    ('2021/04/15 at 21:30:44.55EST',
-     datetime.datetime(2021, 4, 15, 21, 30, 44, 55,
-                       tzinfo=pytz.timezone('EST'))),
-    ('13 days after last memorial day at 12 seconds before 4pm',
-     datetime.datetime(2020, 6, 7, 15, 59, 48)),
-    ('    2     days     before   yesterday    at   9am      ',
-     datetime.datetime(2021, 6, 29, 9)),
-    ('-3 days before today',
-     datetime.datetime(2021, 7, 5)),
-    ('3 days before yesterday at midnight EST',
-     datetime.datetime(2021, 6, 28, tzinfo=pytz.timezone('EST')))
+    ('today', datetime.datetime(2021, 7, 2)),
+    ('tomorrow', datetime.datetime(2021, 7, 3)),
+    ('yesterday', datetime.datetime(2021, 7, 1)),
+    ('21:30', datetime.datetime(2021, 7, 2, 21, 30, 0, 0)),
+    ('12:01am', datetime.datetime(2021, 7, 2, 0, 1, 0, 0)),
+    ('12:02p', datetime.datetime(2021, 7, 2, 12, 2, 0, 0)),
+    ('0:03', datetime.datetime(2021, 7, 2, 0, 3, 0, 0)),
+    ('last wednesday', datetime.datetime(2021, 6, 30)),
+    ('this wed', datetime.datetime(2021, 7, 7)),
+    ('next wed', datetime.datetime(2021, 7, 14)),
+    ('this coming tues', datetime.datetime(2021, 7, 6)),
+    ('this past monday', datetime.datetime(2021, 6, 28)),
+    ('4 days ago', datetime.datetime(2021, 6, 28)),
+    ('4 mondays ago', datetime.datetime(2021, 6, 7)),
+    ('4 months ago', datetime.datetime(2021, 3, 2)),
+    ('3 days back', datetime.datetime(2021, 6, 29)),
+    ('13 weeks from now', datetime.datetime(2021, 10, 1)),
+    ('1 year from now', datetime.datetime(2022, 7, 2)),
+    ('4 weeks from now', datetime.datetime(2021, 7, 30)),
+    ('3 saturdays ago', datetime.datetime(2021, 6, 12)),
+    ('4 months from today', datetime.datetime(2021, 11, 2)),
+    ('4 years from yesterday', datetime.datetime(2025, 7, 1)),
+    ('4 weeks from tomorrow', datetime.datetime(2021, 7, 31)),
+    ('april 15, 2005', datetime.datetime(2005, 4, 15)),
+    ('april 14', datetime.datetime(2021, 4, 14)),
+    ('9:30am on last wednesday', datetime.datetime(2021, 6, 30, 9, 30)),
+    ('2005/apr/15', datetime.datetime(2005, 4, 15)),
+    ('2005 apr 15', datetime.datetime(2005, 4, 15)),
+    ('the 1st wednesday in may', datetime.datetime(2021, 5, 5)),
+    ('last sun of june', datetime.datetime(2021, 6, 27)),
+    ('this Easter', datetime.datetime(2021, 4, 4)),
+    ('last christmas', datetime.datetime(2020, 12, 25)),
+    ('last Xmas', datetime.datetime(2020, 12, 25)),
+    ('xmas, 1999', datetime.datetime(1999, 12, 25)),
+    ('next mlk day', datetime.datetime(2022, 1, 17)),
+    ('Halloween, 2020', datetime.datetime(2020, 10, 31)),
+    ('5 work days after independence day', datetime.datetime(2021, 7, 12)),
+    ('50 working days from last wed', datetime.datetime(2021, 9, 10)),
+    ('25 working days before columbus day', datetime.datetime(2021, 9, 3)),
+    ('today +1 week', datetime.datetime(2021, 7, 9)),
+    ('sunday -3 weeks', datetime.datetime(2021, 6, 13)),
+    ('4 weeks before xmas, 1999', datetime.datetime(1999, 11, 27)),
+    ('3 days before new years eve, 2000', datetime.datetime(2000, 12, 28)),
+    ('july 4th', datetime.datetime(2021, 7, 4)),
+    ('the ides of march', datetime.datetime(2021, 3, 15)),
+    ('the nones of april', datetime.datetime(2021, 4, 5)),
+    ('the kalends of may', datetime.datetime(2021, 5, 1)),
+    ('9/11/2001', datetime.datetime(2001, 9, 11)),
+    ('4 sundays before veterans\' day', datetime.datetime(2021, 10, 17)),
+    ('xmas eve', datetime.datetime(2021, 12, 24)),
+    ('this friday at 5pm', datetime.datetime(2021, 7, 9, 17, 0, 0)),
+    ('presidents day', datetime.datetime(2021, 2, 15)),
+    ('memorial day, 1921', datetime.datetime(1921, 5, 30)),
+    ('today -4 wednesdays', datetime.datetime(2021, 6, 9)),
+    ('thanksgiving', datetime.datetime(2021, 11, 25)),
+    ('2 sun in jun', datetime.datetime(2021, 6, 13)),
+    ('easter -40 days', datetime.datetime(2021, 2, 23)),
+    ('easter +39 days', datetime.datetime(2021, 5, 13)),
+    ('1st tuesday in nov, 2024', datetime.datetime(2024, 11, 5)),
+    (
+        '2 days before last xmas at 3:14:15.92a',
+        datetime.datetime(2020, 12, 23, 3, 14, 15, 92),
+    ),
+    (
+        '3 weeks after xmas, 1995 at midday',
+        datetime.datetime(1996, 1, 15, 12, 0, 0),
+    ),
+    (
+        '4 months before easter, 1992 at midnight',
+        datetime.datetime(1991, 12, 19),
+    ),
+    (
+        '5 months before halloween, 1995 at noon',
+        datetime.datetime(1995, 5, 31, 12),
+    ),
+    ('4 days before last wednesday', datetime.datetime(2021, 6, 26)),
+    ('44 months after today', datetime.datetime(2025, 3, 2)),
+    ('44 years before today', datetime.datetime(1977, 7, 2)),
+    ('44 weeks ago', datetime.datetime(2020, 8, 28)),
+    ('15 minutes to 3am', datetime.datetime(2021, 7, 2, 2, 45)),
+    ('quarter past 4pm', datetime.datetime(2021, 7, 2, 16, 15)),
+    ('half past 9', datetime.datetime(2021, 7, 2, 9, 30)),
+    ('4 seconds to midnight', datetime.datetime(2021, 7, 1, 23, 59, 56)),
+    (
+        '4 seconds to midnight, tomorrow',
+        datetime.datetime(2021, 7, 2, 23, 59, 56),
+    ),
+    ('2021/apr/15T21:30:44.55', datetime.datetime(2021, 4, 15, 21, 30, 44, 55)),
+    (
+        '2021/apr/15 at 21:30:44.55',
+        datetime.datetime(2021, 4, 15, 21, 30, 44, 55),
+    ),
+    (
+        '2021/4/15 at 21:30:44.55',
+        datetime.datetime(2021, 4, 15, 21, 30, 44, 55),
+    ),
+    (
+        '2021/04/15 at 21:30:44.55',
+        datetime.datetime(2021, 4, 15, 21, 30, 44, 55),
+    ),
+    (
+        '2021/04/15 at 21:30:44.55Z',
+        datetime.datetime(2021, 4, 15, 21, 30, 44, 55, tzinfo=pytz.timezone('UTC')),
+    ),
+    (
+        '2021/04/15 at 21:30:44.55EST',
+        datetime.datetime(2021, 4, 15, 21, 30, 44, 55, tzinfo=pytz.timezone('EST')),
+    ),
+    (
+        '13 days after last memorial day at 12 seconds before 4pm',
+        datetime.datetime(2020, 6, 7, 15, 59, 48),
+    ),
+    (
+        '    2     days     before   yesterday    at   9am      ',
+        datetime.datetime(2021, 6, 29, 9),
+    ),
+    ('-3 days before today', datetime.datetime(2021, 7, 5)),
+    (
+        '3 days before yesterday at midnight EST',
+        datetime.datetime(2021, 6, 28, tzinfo=pytz.timezone('EST')),
+    ),
 ]
 
 
 class TestDateparseUtils(unittest.TestCase):
-
     @uu.check_method_for_perf_regressions
     def test_dateparsing(self):
-        dp = du.DateParser(
-            override_now_for_test_purposes = datetime.datetime(2021, 7, 2)
-        )
+        dp = du.DateParser(override_now_for_test_purposes=datetime.datetime(2021, 7, 2))
 
         for (txt, expected_dt) in parsable_expressions:
             try:
@@ -196,28 +147,26 @@ class TestDateparseUtils(unittest.TestCase):
                 self.assertEqual(
                     actual_dt,
                     expected_dt,
-                    f'"{txt}", got "{actual_dt}" while expecting "{expected_dt}"'
+                    f'"{txt}", got "{actual_dt}" while expecting "{expected_dt}"',
                 )
             except du.ParseException:
                 self.fail(f'Expected "{txt}" to parse successfully.')
 
     def test_whitespace_handling(self):
-        dp = du.DateParser(
-            override_now_for_test_purposes = datetime.datetime(2021, 7, 2)
-        )
+        dp = du.DateParser(override_now_for_test_purposes=datetime.datetime(2021, 7, 2))
 
         for (txt, expected_dt) in parsable_expressions:
             try:
                 txt = f' {txt} '
                 i = random.randint(2, 5)
                 replacement = ' ' * i
-                txt = re.sub('\s', replacement, txt)
+                txt = re.sub(r'\s', replacement, txt)
                 actual_dt = dp.parse(txt)
                 self.assertIsNotNone(actual_dt)
                 self.assertEqual(
                     actual_dt,
                     expected_dt,
-                    f'"{txt}", got "{actual_dt}" while expecting "{expected_dt}"'
+                    f'"{txt}", got "{actual_dt}" while expecting "{expected_dt}"',
                 )
             except du.ParseException:
                 self.fail(f'Expected "{txt}" to parse successfully.')
diff --git a/tests/decorator_utils_test.py b/tests/decorator_utils_test.py
index 195dd636a6a2238ad5df9e23d6caf7f841a0353d..2cc2b5f71d14d7295e4610ffccf16f543c766478 100755 (executable)
--- a/tests/decorator_utils_test.py
+++ b/tests/decorator_utils_test.py
@@ -3,16 +3,13 @@
 import unittest
 
 import decorator_utils as du
-
 import unittest_utils as uu
 
 
 class TestDecorators(unittest.TestCase):
-
     def test_singleton(self):
-
         @du.singleton
-        class FooBar():
+        class FooBar:
             pass
 
         x = FooBar()
diff --git a/tests/letter_compress_test.py b/tests/letter_compress_test.py
index a466277704c252377cda349489e86d60a5c22752..e67838a3d32dd6e7c6415ba7edbb73b81c6ad717 100755 (executable)
--- a/tests/letter_compress_test.py
+++ b/tests/letter_compress_test.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 
-import random
 import math
+import random
 import unittest
 
 import bootstrap
@@ -10,7 +10,6 @@ import unittest_utils as uu
 
 
 class TestLetterCompress(unittest.TestCase):
-
     def test_with_random_strings(self):
         alphabet = 'abcdefghijklmnopqrstuvwxyz .,"-'
         for n in range(20):
diff --git a/tests/logging_utils_test.py b/tests/logging_utils_test.py
index 87c00d64e30e3a9b0a90e08658742ad0ae2739aa..a9625d621b26278567adac68a84520f618a08238 100755 (executable)
--- a/tests/logging_utils_test.py
+++ b/tests/logging_utils_test.py
@@ -11,7 +11,6 @@ import unittest_utils as uu
 
 
 class TestLoggingUtils(unittest.TestCase):
-
     def test_output_context(self):
         unique_suffix = sutils.generate_uuid(True)
         filename = f'/tmp/logging_utils_test.{unique_suffix}'
@@ -20,11 +19,11 @@ class TestLoggingUtils(unittest.TestCase):
         with tempfile.SpooledTemporaryFile(mode='r+') as tmpfile1:
             with uu.RecordStdout() as record:
                 with lutils.OutputMultiplexerContext(
-                        lutils.OutputMultiplexer.Destination.FILENAMES |
-                        lutils.OutputMultiplexer.Destination.FILEHANDLES |
-                        lutils.OutputMultiplexer.Destination.LOG_INFO,
-                        filenames = [filename, '/dev/null'],
-                        handles = [tmpfile1, sys.stdout],
+                    lutils.OutputMultiplexer.Destination.FILENAMES
+                    | lutils.OutputMultiplexer.Destination.FILEHANDLES
+                    | lutils.OutputMultiplexer.Destination.LOG_INFO,
+                    filenames=[filename, '/dev/null'],
+                    handles=[tmpfile1, sys.stdout],
                 ) as mplex:
                     mplex.print(secret_message, end='')
 
@@ -46,8 +45,7 @@ class TestLoggingUtils(unittest.TestCase):
         with uu.RecordMultipleStreams(sys.stderr, sys.stdout) as record:
             print("This is a test!")
             print("This is one too.", file=sys.stderr)
-        self.assertEqual(record().readlines(),
-                         ["This is a test!\n", "This is one too.\n"])
+        self.assertEqual(record().readlines(), ["This is a test!\n", "This is one too.\n"])
 
 
 if __name__ == '__main__':
diff --git a/tests/money_test.py b/tests/money_test.py
index 57f4637d1c698384b3fcddcdd38f0863cee30641..f9815322817aea4e2657abf437a019d6b22995a1 100755 (executable)
--- a/tests/money_test.py
+++ b/tests/money_test.py
@@ -2,12 +2,11 @@
 
 import unittest
 
-from type.money import Money
 import unittest_utils as uu
+from type.money import Money
 
 
 class TestMoney(unittest.TestCase):
-
     def test_basic_utility(self):
         amount = Money(1.45)
         another = Money.parse("USD 1.45")
diff --git a/tests/profanity_filter_test.py b/tests/profanity_filter_test.py
index 5648ad350630a68f60af9bf3597a1b0013ec6666..e452d6ac072cc0b2f7a7935262727245b43a53d3 100755 (executable)
--- a/tests/profanity_filter_test.py
+++ b/tests/profanity_filter_test.py
@@ -7,13 +7,14 @@ import unittest_utils
 
 
 class TestProfanityFilter(unittest.TestCase):
-
     def test_basic_functionality(self):
         p = pf.ProfanityFilter()
         self.assertTrue(p.is_bad_word('shit'))
         self.assertTrue(p.contains_bad_word('this is another fucking test'))
         self.assertTrue(p.contains_bad_word('this is another fuckin test'))
-        self.assertFalse(p.contains_bad_word('Mary had a little lamb whose fleese was white as snow.'))
+        self.assertFalse(
+            p.contains_bad_word('Mary had a little lamb whose fleese was white as snow.')
+        )
 
 
 if __name__ == '__main__':
diff --git a/tests/rate_test.py b/tests/rate_test.py
index 621539b112b12775f30749dae98886b2246f3d51..e75011921c2fa1432a7824a1e0c91e41dc1585a4 100755 (executable)
--- a/tests/rate_test.py
+++ b/tests/rate_test.py
@@ -2,34 +2,24 @@
 
 import unittest
 
-from type.rate import Rate
-from type.money import Money
-
 import unittest_utils as uu
+from type.money import Money
+from type.rate import Rate
 
 
 class TestRate(unittest.TestCase):
     def test_basic_utility(self):
         my_stock_returns = Rate(percent_change=-20.0)
         my_portfolio = 1000.0
-        self.assertAlmostEqual(
-            800.0,
-            my_stock_returns.apply_to(my_portfolio)
-        )
+        self.assertAlmostEqual(800.0, my_stock_returns.apply_to(my_portfolio))
 
         my_bond_returns = Rate(percentage=104.5)
         my_money = Money(500.0)
-        self.assertAlmostEqual(
-            Money(522.5),
-            my_bond_returns.apply_to(my_money)
-        )
+        self.assertAlmostEqual(Money(522.5), my_bond_returns.apply_to(my_money))
 
         my_multiplier = Rate(multiplier=1.72)
         my_nose_length = 3.2
-        self.assertAlmostEqual(
-            5.504,
-            my_multiplier.apply_to(my_nose_length)
-        )
+        self.assertAlmostEqual(5.504, my_multiplier.apply_to(my_nose_length))
 
     def test_conversions(self):
         x = Rate(104.55)
diff --git a/tests/string_utils_test.py b/tests/string_utils_test.py
index cc570364047382c3d0e2aee570674cc37e87c710..51848946b68c3b661cb21d6b7d38c3eaad15e5ef 100755 (executable)
--- a/tests/string_utils_test.py
+++ b/tests/string_utils_test.py
@@ -2,16 +2,14 @@
 
 import unittest
 
-from ansi import fg, bg, reset
 import bootstrap
 import string_utils as su
-
 import unittest_utils as uu
+from ansi import bg, fg, reset
 
 
 @uu.check_all_methods_for_perf_regressions()
 class TestStringUtils(unittest.TestCase):
-
     def test_is_none_or_empty(self):
         self.assertTrue(su.is_none_or_empty(None))
         self.assertTrue(su.is_none_or_empty(""))
@@ -130,7 +128,7 @@ class TestStringUtils(unittest.TestCase):
         s = f' {fg("red")}  this is a test  {bg("white")}  this is a test  {reset()}  '
         self.assertEqual(
             su.strip_escape_sequences(s),
-            '   this is a test    this is a test    '
+            '   this is a test    this is a test    ',
         )
         s = ' this is another test '
         self.assertEqual(su.strip_escape_sequences(s), s)
diff --git a/text_utils.py b/text_utils.py
index 534813c2ef28e186b0adb71ed22b3b1d7cbe4c98..d9bb652689f8b317af606b4334a9e09604518975 100644 (file)
--- a/text_utils.py
+++ b/text_utils.py
@@ -126,9 +126,7 @@ def distribute_strings(
     subwidth = math.floor(width / len(strings))
     retval = ""
     for string in strings:
-        string = justify_string(
-            string, width=subwidth, alignment=alignment, padding=padding
-        )
+        string = justify_string(string, width=subwidth, alignment=alignment, padding=padding)
         retval += string
     while len(retval) > width:
         retval = retval.replace('  ', ' ', 1)
@@ -150,13 +148,7 @@ def justify_string_by_chunk(string: str, width: int = 80, padding: str = " ") ->
     padding = padding[0]
     first, *rest, last = string.split()
     w = width - (len(first) + 1 + len(last) + 1)
-    ret = (
-        first
-        + padding
-        + distribute_strings(rest, width=w, padding=padding)
-        + padding
-        + last
-    )
+    ret = first + padding + distribute_strings(rest, width=w, padding=padding) + padding + last
     return ret
 
 
diff --git a/type/centcount.py b/type/centcount.py
index 13f14b7f835d2e15679ea3bddd9499398056448e..2cb99e029ede3a72f3ccb83e79fbfaf62b6cbb15 100644 (file)
--- a/type/centcount.py
+++ b/type/centcount.py
@@ -50,7 +50,8 @@ class CentCount(object):
         if isinstance(other, CentCount):
             if self.currency == other.currency:
                 return CentCount(
-                    centcount=self.centcount + other.centcount, currency=self.currency
+                    centcount=self.centcount + other.centcount,
+                    currency=self.currency,
                 )
             else:
                 raise TypeError('Incompatible currencies in add expression')
@@ -64,7 +65,8 @@ class CentCount(object):
         if isinstance(other, CentCount):
             if self.currency == other.currency:
                 return CentCount(
-                    centcount=self.centcount - other.centcount, currency=self.currency
+                    centcount=self.centcount - other.centcount,
+                    currency=self.currency,
                 )
             else:
                 raise TypeError('Incompatible currencies in add expression')
@@ -79,7 +81,8 @@ class CentCount(object):
             raise TypeError('can not multiply monetary quantities')
         else:
             return CentCount(
-                centcount=int(self.centcount * float(other)), currency=self.currency
+                centcount=int(self.centcount * float(other)),
+                currency=self.currency,
             )
 
     def __truediv__(self, other):
@@ -113,7 +116,8 @@ class CentCount(object):
         if isinstance(other, CentCount):
             if self.currency == other.currency:
                 return CentCount(
-                    centcount=other.centcount - self.centcount, currency=self.currency
+                    centcount=other.centcount - self.centcount,
+                    currency=self.currency,
                 )
             else:
                 raise TypeError('Incompatible currencies in sub expression')
@@ -122,7 +126,8 @@ class CentCount(object):
                 raise TypeError('In strict_mode only two moneys can be added')
             else:
                 return CentCount(
-                    centcount=int(other) - self.centcount, currency=self.currency
+                    centcount=int(other) - self.centcount,
+                    currency=self.currency,
                 )
 
     __rmul__ = __mul__
diff --git a/type/locations.py b/type/locations.py
index 744f63a3997f38b26b379b7bf25d19425ec7d2f3..24ab063a722d5b159be380ffb459cabcd5d35d23 100644 (file)
--- a/type/locations.py
+++ b/type/locations.py
@@ -2,6 +2,7 @@
 
 import enum
 
+
 @enum.unique
 class Location(enum.Enum):
     UNKNOWN = 0
diff --git a/type/money.py b/type/money.py
index d7e6ffa2197c629949ecae30c855df5870cc3a3a..39557aacdf79dc31ca6e48d27f367305325e80b8 100644 (file)
--- a/type/money.py
+++ b/type/money.py
@@ -60,7 +60,8 @@ class Money(object):
                 raise TypeError('In strict_mode only two moneys can be added')
             else:
                 return Money(
-                    amount=self.amount + Decimal(float(other)), currency=self.currency
+                    amount=self.amount + Decimal(float(other)),
+                    currency=self.currency,
                 )
 
     def __sub__(self, other):
@@ -74,7 +75,8 @@ class Money(object):
                 raise TypeError('In strict_mode only two moneys can be added')
             else:
                 return Money(
-                    amount=self.amount - Decimal(float(other)), currency=self.currency
+                    amount=self.amount - Decimal(float(other)),
+                    currency=self.currency,
                 )
 
     def __mul__(self, other):
@@ -82,7 +84,8 @@ class Money(object):
             raise TypeError('can not multiply monetary quantities')
         else:
             return Money(
-                amount=self.amount * Decimal(float(other)), currency=self.currency
+                amount=self.amount * Decimal(float(other)),
+                currency=self.currency,
             )
 
     def __truediv__(self, other):
@@ -90,7 +93,8 @@ class Money(object):
             raise TypeError('can not divide monetary quantities')
         else:
             return Money(
-                amount=self.amount / Decimal(float(other)), currency=self.currency
+                amount=self.amount / Decimal(float(other)),
+                currency=self.currency,
             )
 
     def __float__(self):
@@ -119,7 +123,8 @@ class Money(object):
                 raise TypeError('In strict_mode only two moneys can be added')
             else:
                 return Money(
-                    amount=Decimal(float(other)) - self.amount, currency=self.currency
+                    amount=Decimal(float(other)) - self.amount,
+                    currency=self.currency,
                 )
 
     __rmul__ = __mul__
diff --git a/type/people.py b/type/people.py
index 1dc04214000b74377f1772bac2048ad2cccd853d..1230db22f90a6d467a6b94e66daa5674e7963163 100644 (file)
--- a/type/people.py
+++ b/type/people.py
@@ -11,4 +11,3 @@ class Person(enum.Enum):
     AARON_AND_DANA = 4
     AARON = 4
     DANA = 4
-
diff --git a/unittest_utils.py b/unittest_utils.py
index b259c6b4c98f687d8e3c8db4b41c280db7ac9d19..f229df75e8b88825d66ca227d7e907d3dc725e1a 100644 (file)
--- a/unittest_utils.py
+++ b/unittest_utils.py
@@ -30,9 +30,7 @@ import function_utils
 import scott_secrets
 
 logger = logging.getLogger(__name__)
-cfg = config.add_commandline_args(
-    f'Logging ({__file__})', 'Args related to function decorators'
-)
+cfg = config.add_commandline_args(f'Logging ({__file__})', 'Args related to function decorators')
 cfg.add_argument(
     '--unittests_ignore_perf',
     action='store_true',
@@ -123,9 +121,7 @@ class DatabasePerfRegressionDataPersister(PerfRegressionDataPersister):
 
     def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
         results = self.conn.execute(
-            sa.text(
-                f'SELECT * FROM runtimes_by_function WHERE function = "{method_id}";'
-            )
+            sa.text(f'SELECT * FROM runtimes_by_function WHERE function = "{method_id}";')
         )
         ret: Dict[str, List[float]] = {method_id: []}
         for result in results.all():
@@ -165,9 +161,7 @@ def check_method_for_perf_regressions(func: Callable) -> Callable:
             helper = FileBasedPerfRegressionDataPersister(filename)
         elif config.config['unittests_persistance_strategy'] == 'DATABASE':
             dbspec = config.config['unittests_perfdb_spec']
-            dbspec = dbspec.replace(
-                '<PASSWORD>', scott_secrets.MARIADB_UNITTEST_PERF_PASSWORD
-            )
+            dbspec = dbspec.replace('<PASSWORD>', scott_secrets.MARIADB_UNITTEST_PERF_PASSWORD)
             helper = DatabasePerfRegressionDataPersister(dbspec)
         else:
             raise Exception('Unknown/unexpected --unittests_persistance_strategy value')
diff --git a/unscrambler.py b/unscrambler.py
index d9e4253e4cd77165fae6c4962d5957d74b619d9c..78c1f9b4f7e7c1a5ef618b54eeed2eb012db7a4d 100644 (file)
--- a/unscrambler.py
+++ b/unscrambler.py
@@ -7,9 +7,7 @@ import config
 import decorator_utils
 import list_utils
 
-cfg = config.add_commandline_args(
-    f'Unscramble! ({__file__})', 'A fast word unscrambler.'
-)
+cfg = config.add_commandline_args(f'Unscramble! ({__file__})', 'A fast word unscrambler.')
 cfg.add_argument(
     "--unscramble_indexfile",
     help="Path to a file of signature -> word index.",
diff --git a/waitable_presence.py b/waitable_presence.py
index 1d6c3ebdb7bf470ecbeb0d65bb351721a3ba3ec7..e1da4310b76a77693aa505452e0f3748c4fcc62d 100644 (file)
--- a/waitable_presence.py
+++ b/waitable_presence.py
@@ -47,9 +47,7 @@ class WaitablePresenceDetectorWithMemory(state_tracker.WaitableAutomaticStateTra
         self.everyone_gone_since: Optional[datetime.datetime] = None
         self.someone_home_since: Optional[datetime.datetime] = None
         self.location = override_location
-        self.detector: base_presence.PresenceDetection = (
-            base_presence.PresenceDetection()
-        )
+        self.detector: base_presence.PresenceDetection = base_presence.PresenceDetection()
         super().__init__(
             {
                 'poll_presence': override_update_interval_sec,