- automatically wrap unittest.main() with a call to
- bootstrap.initialize so that we getLogger config, commandline args,
- logging control, etc... this works fine but it's a little hacky so
- caveat emptor.
+automatically wrap unittest.main() with a call to bootstrap.initialize
+so that we get logger config, commandline args, logging control,
+etc... This works fine, but it's a little hacky, so caveat emptor.
+
+import contextlib
+import inspect
+import logging
+import os
+import pickle
+import random
+import tempfile
+import unittest
+from abc import ABC, abstractmethod
+from typing import Any, Callable, Dict, List, Literal, Optional
+
+import sqlalchemy as sa
+
+import bootstrap
+import config
+import scott_secrets
+
+logger = logging.getLogger(__name__)
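+# Assumption: this is roughly how the docstring's "automatically wrap
+# unittest.main()" promise is kept. bootstrap.initialize is treated as a
+# decorator that parses commandline args and configures logging before
+# delegating to the wrapped callable; the real mechanism may differ.
+unittest.main = bootstrap.initialize(unittest.main)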
-cfg = config.add_commandline_args(
- f'Logging ({__file__})',
- 'Args related to function decorators')
+cfg = config.add_commandline_args(f'Logging ({__file__})', 'Args related to function decorators')
cfg.add_argument(
'--unittests_persistance_strategy',
choices=['FILE', 'DATABASE'],
default='DATABASE',
)
cfg.add_argument(
'--unittests_perfdb_filename',
type=str,
metavar='FILENAME',
default=f'{os.environ["HOME"]}/.python_unittest_performance_db',
)
cfg.add_argument(
'--unittests_perfdb_spec',
type=str,
metavar='DBSPEC',
default='mariadb+pymysql://python_unittest:<PASSWORD>@db.house:3306/python_unittest_performance',
)
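# Example invocations (hypothetical test module name; the flags are the
# ones defined above):
#
#   python my_test.py --unittests_persistance_strategy=FILE \
#       --unittests_perfdb_filename=/tmp/perfdb
#   python my_test.py --unittests_persistance_strategy=DATABASE \
#       --unittests_perfdb_spec='mariadb+pymysql://user:<PASSWORD>@host:3306/db'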
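# The two persisters below presumably share a small interface; here's a
# minimal sketch, assuming an ABC (the ABC/abstractmethod imports suggest
# one; the class name is illustrative, not confirmed by this section):
class PerfRegressionDataPersister(ABC):
    """Interface for loading/saving unittest performance data."""

    @abstractmethod
    def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
        ...

    @abstractmethod
    def save_performance_data(self, method_id: str, data: Dict[str, List[float]]):
        ...

    @abstractmethod
    def delete_performance_data(self, method_id: str):
        ...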
    def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
        with open(self.filename, 'rb') as f:
            # Assumption: the file holds one pickled Dict[str, List[float]].
            return pickle.load(f)
        self.dbspec = dbspec
        self.engine = sa.create_engine(self.dbspec)
        self.conn = self.engine.connect()

    def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
        # Assumption: one row per (function, runtime) sample in the
        # runtimes_by_function table referenced below.
        results = self.conn.execute(
            sa.text(f'SELECT runtime FROM runtimes_by_function WHERE function = "{method_id}";')
        )
        return {method_id: [row.runtime for row in results]}
    def save_performance_data(self, method_id: str, data: Dict[str, List[float]]):
        self.delete_performance_data(method_id)
        for runtime in data.get(method_id, []):  # assumption: one row per sample
            self.conn.execute(sa.text(f'INSERT INTO runtimes_by_function (function, runtime) VALUES ("{method_id}", {runtime});'))
    def delete_performance_data(self, method_id: str):
        sql = f'DELETE FROM runtimes_by_function WHERE function = "{method_id}"'
        self.conn.execute(sa.text(sql))
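# Example use of a persister (hypothetical dbspec and method id):
#
#   helper = DatabasePerfRegressionDataPersister('mariadb+pymysql://u:p@host/db')
#   perfdb = helper.load_performance_data('tests.my_test.MyTest.test_foo')
#   perfdb['tests.my_test.MyTest.test_foo'].append(0.42)
#   helper.save_performance_data('tests.my_test.MyTest.test_foo', perfdb)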
if config.config['unittests_persistance_strategy'] == 'FILE':
filename = config.config['unittests_perfdb_filename']
helper = FileBasedPerfRegressionDataPersister(filename)
elif config.config['unittests_persistance_strategy'] == 'DATABASE':
    dbspec = config.config['unittests_perfdb_spec']
    dbspec = dbspec.replace('<PASSWORD>', scott_secrets.MARIADB_UNITTEST_PERF_PASSWORD)
    helper = DatabasePerfRegressionDataPersister(dbspec)
else:
    # Unreachable given the argparse choices above, but fail loudly anyway.
    raise ValueError('Unknown --unittests_persistance_strategy value.')
- logger.debug(f'Watching {func_name}\'s performance...')
- logger.debug(f'Canonical function identifier = {func_id}')
+ logger.debug('Watching %s\'s performance...', func_name)
+ logger.debug('Canonical function identifier = "%s"', func_id)
hist = perfdb.get(func_id, [])
if len(hist) < config.config['unittests_num_perf_samples']:
hist.append(run_time)
- logger.debug(
- f'For {func_name}, max acceptable runtime is {limit:f}s'
- )
- logger.debug(
- f'For {func_name}, actual observed runtime was {run_time:f}s'
- )
- if (
- run_time > limit and
- not config.config['unittests_ignore_perf']
- ):
+ logger.debug('For %s, max acceptable runtime is %.2fs', func_name, limit)
+ logger.debug('For %s, actual observed runtime was %.2fs', func_name, run_time)
+            if run_time > limit and not config.config['unittests_ignore_perf']:
-{hist[-1]:f}s is the slowest record in {len(hist)} db perf samples.
-It just ran in {run_time:f}s which is >5 stdevs slower than the slowest sample.
+{slowest:f}s is the slowest runtime on record in {len(hist)} perf samples.
+It just ran in {run_time:f}s which is 4+ stdevs slower than the slowest.
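# How the acceptable-runtime limit above is computed isn't shown in this
# section; here's a sketch consistent with the "4+ stdevs slower than the
# slowest" message (an assumption, not the verbatim source -- note that
# hist is kept sorted below, so hist[-1] is the slowest sample):
#
#     import statistics
#     slowest = hist[-1]
#     stdev = statistics.stdev(hist)
#     limit = slowest + 4 * stdev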
n = min(config.config['unittests_num_perf_samples'], len(hist))
hist = random.sample(hist, n)
hist.sort()
perfdb[func_id] = hist
helper.save_performance_data(func_id, perfdb)
return value
+ """Decorate unittests with this to pay attention to the perf of the
+ testcode and flag perf regressions. e.g.
+
+ import unittest_utils as uu
+
+ @uu.check_all_methods_for_perf_regressions()
+ class TestMyClass(unittest.TestCase):
+
+ def test_some_part_of_my_class(self):
+ ...
+
+ """
+
    def decorate_the_testcase(cls):
        if issubclass(cls, unittest.TestCase):
            for name, m in inspect.getmembers(cls, inspect.isfunction):
                if name.startswith(prefix):
                    setattr(cls, name, check_method_for_perf_regressions(m))
        return cls
    return decorate_the_testcase
    def __enter__(self) -> Callable[[], tempfile.SpooledTemporaryFile]:
        self.recorder = contextlib.redirect_stdout(self.destination)
        self.recorder.__enter__()
        # Return an accessor so callers can read the captured output later.
        return lambda: self.destination
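    # The matching __exit__ presumably pops the redirect and rewinds the
    # capture buffer so the accessor reads from the start; a minimal sketch
    # under that assumption:
    def __exit__(self, *args) -> Optional[bool]:
        self.recorder.__exit__(*args)
        self.destination.seek(0)
        return None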
... print("This is a test!", file=sys.stderr)
>>> print({record().readline()})
{'This is a test!\\n'}
... print("This is a test!", file=sys.stderr)
>>> print({record().readline()})
{'This is a test!\\n'}
"""
Record the output to more than one stream.
"""
def __init__(self, *files) -> None:
"""
Record the output to more than one stream.
"""
def __init__(self, *files) -> None:
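        # A minimal sketch of the fan-out this class describes (assumption:
        # writes and flushes are teed to every underlying file; the real
        # implementation may buffer or restore streams differently):
        self.files = [*files]

    def write(self, data: str) -> None:
        for f in self.files:
            f.write(data)

    def flush(self) -> None:
        for f in self.files:
            f.flush()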