# Command-line flags for the performance-regression test decorators.
# NOTE(review): resolved a leftover diff hunk here ('-'/'+' marker lines plus
# a stray ')') down to the '+' side.  The group title says 'Logging' even
# though these flags configure unittest perf persistence — confirm upstream.
cfg = config.add_commandline_args(
    f'Logging ({__file__})',
    'Args related to function decorators',
)
# Backend used to persist observed per-test runtimes.
# NOTE(review): this registration appeared twice verbatim in the corrupted
# source; argparse raises on duplicate option strings, so only one copy is
# kept.  The spelling 'persistance' is preserved — it is the flag's public
# name and renaming it would break callers/scripts.
cfg.add_argument(
    '--unittests_persistance_strategy',
    choices=['FILE', 'DATABASE'],
    default='DATABASE',
    help='Should we persist perftest results in a file or database?',
)
# File used for perf samples when the FILE persistence strategy is selected.
# NOTE(review): duplicated verbatim in the corrupted source; deduplicated to a
# single registration (argparse raises on duplicate option strings).
cfg.add_argument(
    '--unittests_perfdb_filename',
    type=str,
    metavar='FILENAME',
    default=f'{os.environ["HOME"]}/.python_unittest_performance_db',
    help='File in which to store perf data (iff the persistance strategy is FILE)',
)
# Database spec used when the DATABASE persistence strategy is selected.
# '<PASSWORD>' is a placeholder substituted at runtime from secrets — do not
# put a real credential here.
# NOTE(review): duplicated in the corrupted source and the second copy lost
# its closing ')'; deduplicated to one complete registration.
cfg.add_argument(
    '--unittests_perfdb_spec',
    type=str,
    metavar='DBSPEC',
    default='mariadb+pymysql://python_unittest:<PASSWORD>@db.house:3306/python_unittest_performance',
    help='Database spec for perf data (iff the persistance strategy is DATABASE)',
)
# NOTE(review): corrupted fragment — the same truncated method appears twice
# and the `with` block has no body in this view.  It opens self.filename in
# binary mode, so it presumably unpickles a Dict[str, List[float]] of
# per-method runtimes — TODO: confirm against the original file before use.
def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
with open(self.filename, 'rb') as f:
def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
with open(self.filename, 'rb') as f:
# NOTE(review): interleaved, duplicated fragments of the database-backed
# persister: the tail of __init__ (stores dbspec, creates an engine and a
# connection — `sa` is presumably `import sqlalchemy as sa`, confirm) and the
# header of load_performance_data, whose query text and result handling are
# missing from this view — confirm against the original file.
self.dbspec = dbspec
self.engine = sa.create_engine(self.dbspec)
self.conn = self.engine.connect()
def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
results = self.conn.execute(
self.dbspec = dbspec
self.engine = sa.create_engine(self.dbspec)
self.conn = self.engine.connect()
def load_performance_data(self, method_id: str) -> Dict[str, List[float]]:
results = self.conn.execute(
# NOTE(review): duplicated truncated fragment.  save_performance_data starts
# by deleting any existing rows for method_id; the code that inserts the new
# samples from `data` is missing from this view — confirm against the
# original file.
def save_performance_data(self, method_id: str, data: Dict[str, List[float]]):
self.delete_performance_data(method_id)
def save_performance_data(self, method_id: str, data: Dict[str, List[float]]):
self.delete_performance_data(method_id)
# NOTE(review): duplicated truncated fragment.  The DELETE statement is built
# by f-string interpolation of method_id — SQL-injection-prone; this should
# use a parameterized query (e.g. sqlalchemy.text with a bound parameter).
# The execute(...) call that presumably follows is missing from this view.
def delete_performance_data(self, method_id: str):
sql = f'DELETE FROM runtimes_by_function WHERE function = "{method_id}"'
def delete_performance_data(self, method_id: str):
sql = f'DELETE FROM runtimes_by_function WHERE function = "{method_id}"'
# NOTE(review): corrupted fragment of the persister-selection logic.  The
# FILE branch appears twice verbatim; the DATABASE branch's '<PASSWORD>'
# substitution lines also appear twice; and the trailing 'else:' dangles with
# no body in this view.  The intended shape is presumably:
#   FILE     -> FileBasedPerfRegressionDataPersister(filename)
#   DATABASE -> substitute the secret into dbspec, then
#               DatabasePerfRegressionDataPersister(dbspec)
#   else     -> error/raise
# TODO: confirm against the original file before repairing.
if config.config['unittests_persistance_strategy'] == 'FILE':
filename = config.config['unittests_perfdb_filename']
helper = FileBasedPerfRegressionDataPersister(filename)
if config.config['unittests_persistance_strategy'] == 'FILE':
filename = config.config['unittests_perfdb_filename']
helper = FileBasedPerfRegressionDataPersister(filename)
dbspec = dbspec.replace('<PASSWORD>', scott_secrets.MARIADB_UNITTEST_PERF_PASSWORD)
helper = DatabasePerfRegressionDataPersister(dbspec)
else:
dbspec = dbspec.replace('<PASSWORD>', scott_secrets.MARIADB_UNITTEST_PERF_PASSWORD)
helper = DatabasePerfRegressionDataPersister(dbspec)
else:
# NOTE(review): resolved a leftover diff hunk — kept the '+' side, which uses
# lazy %-style logging arguments instead of f-strings (no formatting cost
# when DEBUG is disabled).
logger.debug('Watching %s\'s performance...', func_name)
logger.debug('Canonical function identifier = "%s"', func_id)
# Accumulate up to 'unittests_num_perf_samples' observed runtimes for this
# function before regression checking kicks in.
# NOTE(review): this fragment appeared twice verbatim in the corrupted
# source; one copy kept.  The append is taken to be the body of the `if`
# (sampling until the quota is reached) — confirm against the original file.
hist = perfdb.get(func_id, [])
if len(hist) < config.config['unittests_num_perf_samples']:
    hist.append(run_time)
# NOTE(review): resolved a leftover diff hunk — kept the '+' side: lazy
# %-style logging and the simplified condition (the '+' version drops the
# separate 'unittests_ignore_perf' escape hatch).
logger.debug('For %s, max acceptable runtime is %.2fs', func_name, limit)
logger.debug('For %s, actual observed runtime was %.2fs', func_name, run_time)
if run_time > limit:
    # The duplicated, unterminated triple-quoted message was deduplicated and
    # closed here; the original text may have continued beyond this view —
    # confirm against the original file.
    msg = f'''{func_id} performance has regressed unacceptably.
{slowest:f}s is the slowest runtime on record in {len(hist)} perf samples.
It just ran in {run_time:f}s which is 4+ stdevs slower than the slowest.
'''
    slf = args[0]  # Peek at the wrapped function's self ref.
    slf.fail(msg)  # ...to fail the testcase.
def decorate_the_testcase(cls):
    """Wrap every test method of *cls* whose name starts with `prefix`
    in a performance-regression check, then return the class.

    NOTE(review): the corrupted source contained this function twice and
    neither copy returned anything; a class decorator that returns None
    destroys the decorated class binding, so `return cls` was added.
    Non-TestCase classes are returned unchanged.
    """
    if issubclass(cls, unittest.TestCase):
        for name, m in inspect.getmembers(cls, inspect.isfunction):
            if name.startswith(prefix):
                # Replace the bound test function with the checking wrapper.
                setattr(cls, name, check_method_for_perf_regressions(m))
    return cls
# NOTE(review): duplicated truncated fragment.  Sets up a
# contextlib.redirect_stdout recorder aimed at self.destination; the rest of
# __enter__ (entering the recorder and returning the reader callable promised
# by the annotation) is missing from this view — confirm against the
# original file.
def __enter__(self) -> Callable[[], tempfile.SpooledTemporaryFile]:
self.recorder = contextlib.redirect_stdout(self.destination)
def __enter__(self) -> Callable[[], tempfile.SpooledTemporaryFile]:
self.recorder = contextlib.redirect_stdout(self.destination)
... print("This is a test!", file=sys.stderr)
>>> print({record().readline()})
{'This is a test!\\n'}
def __init__(self, *files) -> None:
    """Remember which streams to capture and create a spooled temporary
    file to receive the redirected output.

    NOTE(review): duplicated verbatim in the corrupted source; one copy
    kept.  The enclosing class header is not visible in this chunk, and
    the original __init__ may have continued past this view — confirm.
    """
    self.files = [*files]
    # Spooled temp file stays in memory until it grows large; 'r+' so the
    # captured output can be read back by the caller.
    self.destination = tempfile.SpooledTemporaryFile(mode='r+')