Used isort to sort imports. Also added it to the git pre-commit hook.
[python_utils.git] / unittest_utils.py
index e84b4eb929cfb8ac37daf31811b675c9d9d7825e..ba9ca28f091bc70bd232cb6f059116cfc70d7fb9 100644 (file)
@@ -7,7 +7,6 @@
    caveat emptor.
 """
 
-from abc import ABC, abstractmethod
 import contextlib
 import functools
 import inspect
@@ -16,20 +15,20 @@ import os
 import pickle
 import random
 import statistics
-import time
 import tempfile
-from typing import Callable, Dict, List
+import time
 import unittest
 import warnings
+from abc import ABC, abstractmethod
+from typing import Any, Callable, Dict, List, Optional
+
+import sqlalchemy as sa
 
 import bootstrap
 import config
 import function_utils
 import scott_secrets
 
-import sqlalchemy as sa
-
-
 logger = logging.getLogger(__name__)
 cfg = config.add_commandline_args(
     f'Logging ({__file__})', 'Args related to function decorators'
@@ -83,7 +82,7 @@ class PerfRegressionDataPersister(ABC):
         pass
 
     @abstractmethod
-    def load_performance_data(self) -> Dict[str, List[float]]:
+    def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
         pass
 
     @abstractmethod
@@ -98,7 +97,7 @@ class PerfRegressionDataPersister(ABC):
 class FileBasedPerfRegressionDataPersister(PerfRegressionDataPersister):
     def __init__(self, filename: str):
         self.filename = filename
-        self.traces_to_delete = []
+        self.traces_to_delete: List[str] = []
 
     def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
         with open(self.filename, 'rb') as f:
@@ -128,7 +127,7 @@ class DatabasePerfRegressionDataPersister(PerfRegressionDataPersister):
                 f'SELECT * FROM runtimes_by_function WHERE function = "{method_id}";'
             )
         )
-        ret = {method_id: []}
+        ret: Dict[str, List[float]] = {method_id: []}
         for result in results.all():
             ret[method_id].append(result['runtime'])
         results.close()
@@ -158,6 +157,9 @@ def check_method_for_perf_regressions(func: Callable) -> Callable:
 
     @functools.wraps(func)
     def wrapper_perf_monitor(*args, **kwargs):
+        if config.config['unittests_ignore_perf']:
+            return func(*args, **kwargs)
+
         if config.config['unittests_persistance_strategy'] == 'FILE':
             filename = config.config['unittests_perfdb_filename']
             helper = FileBasedPerfRegressionDataPersister(filename)
@@ -208,7 +210,7 @@ def check_method_for_perf_regressions(func: Callable) -> Callable:
             limit = slowest + stdev * 4
             logger.debug(f'For {func_name}, max acceptable runtime is {limit:f}s')
             logger.debug(f'For {func_name}, actual observed runtime was {run_time:f}s')
-            if run_time > limit and not config.config['unittests_ignore_perf']:
+            if run_time > limit:
                 msg = f'''{func_id} performance has regressed unacceptably.
 {slowest:f}s is the slowest runtime on record in {len(hist)} perf samples.
 It just ran in {run_time:f}s which is 4+ stdevs slower than the slowest.
@@ -280,14 +282,16 @@ class RecordStdout(object):
 
     def __init__(self) -> None:
         self.destination = tempfile.SpooledTemporaryFile(mode='r+')
-        self.recorder = None
+        self.recorder: Optional[contextlib.redirect_stdout] = None
 
     def __enter__(self) -> Callable[[], tempfile.SpooledTemporaryFile]:
         self.recorder = contextlib.redirect_stdout(self.destination)
+        assert self.recorder
         self.recorder.__enter__()
         return lambda: self.destination
 
-    def __exit__(self, *args) -> bool:
+    def __exit__(self, *args) -> Optional[bool]:
+        assert self.recorder
         self.recorder.__exit__(*args)
         self.destination.seek(0)
         return None
@@ -307,14 +311,16 @@ class RecordStderr(object):
 
     def __init__(self) -> None:
         self.destination = tempfile.SpooledTemporaryFile(mode='r+')
-        self.recorder = None
+        self.recorder: Optional[contextlib.redirect_stdout[Any]] = None
 
     def __enter__(self) -> Callable[[], tempfile.SpooledTemporaryFile]:
-        self.recorder = contextlib.redirect_stderr(self.destination)
+        self.recorder = contextlib.redirect_stderr(self.destination)  # type: ignore
+        assert self.recorder
         self.recorder.__enter__()
         return lambda: self.destination
 
-    def __exit__(self, *args) -> bool:
+    def __exit__(self, *args) -> Optional[bool]:
+        assert self.recorder
         self.recorder.__exit__(*args)
         self.destination.seek(0)
         return None
@@ -328,7 +334,7 @@ class RecordMultipleStreams(object):
     def __init__(self, *files) -> None:
         self.files = [*files]
         self.destination = tempfile.SpooledTemporaryFile(mode='r+')
-        self.saved_writes = []
+        self.saved_writes: List[Callable[..., Any]] = []
 
     def __enter__(self) -> Callable[[], tempfile.SpooledTemporaryFile]:
         for f in self.files:
@@ -336,10 +342,11 @@ class RecordMultipleStreams(object):
             f.write = self.destination.write
         return lambda: self.destination
 
-    def __exit__(self, *args) -> bool:
+    def __exit__(self, *args) -> Optional[bool]:
         for f in self.files:
             f.write = self.saved_writes.pop()
         self.destination.seek(0)
+        return None
 
 
 if __name__ == '__main__':