From: Scott Date: Mon, 31 Jan 2022 06:57:29 +0000 (-0800) Subject: --unittests_ignore_perf shouldn't mess with the database / file X-Git-Url: https://wannabe.guru.org/gitweb/?a=commitdiff_plain;h=f77068f0dc494d9394cdacafc6f719730926839f;p=python_utils.git --unittests_ignore_perf shouldn't mess with the database / file layer at all. When the db was down some of the tests failed even though they didn't care about perf. Let's not do that. --- diff --git a/unittest_utils.py b/unittest_utils.py index e84b4eb..b9746a8 100644 --- a/unittest_utils.py +++ b/unittest_utils.py @@ -158,6 +158,9 @@ def check_method_for_perf_regressions(func: Callable) -> Callable: @functools.wraps(func) def wrapper_perf_monitor(*args, **kwargs): + if config.config['unittests_ignore_perf']: + return func(*args, **kwargs) + if config.config['unittests_persistance_strategy'] == 'FILE': filename = config.config['unittests_perfdb_filename'] helper = FileBasedPerfRegressionDataPersister(filename) @@ -208,7 +211,7 @@ def check_method_for_perf_regressions(func: Callable) -> Callable: limit = slowest + stdev * 4 logger.debug(f'For {func_name}, max acceptable runtime is {limit:f}s') logger.debug(f'For {func_name}, actual observed runtime was {run_time:f}s') - if run_time > limit and not config.config['unittests_ignore_perf']: + if run_time > limit: msg = f'''{func_id} performance has regressed unacceptably. {slowest:f}s is the slowest runtime on record in {len(hist)} perf samples. It just ran in {run_time:f}s which is 4+ stdevs slower than the slowest.