From f77068f0dc494d9394cdacafc6f719730926839f Mon Sep 17 00:00:00 2001 From: Scott Date: Sun, 30 Jan 2022 22:57:29 -0800 Subject: [PATCH] --unittests_ignore_perf shouldn't mess with the database / file layer at all. When the db was down some of the tests failed even though they didn't care about perf. Let's not do that. --- unittest_utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/unittest_utils.py b/unittest_utils.py index e84b4eb..b9746a8 100644 --- a/unittest_utils.py +++ b/unittest_utils.py @@ -158,6 +158,9 @@ def check_method_for_perf_regressions(func: Callable) -> Callable: @functools.wraps(func) def wrapper_perf_monitor(*args, **kwargs): + if config.config['unittests_ignore_perf']: + return func(*args, **kwargs) + if config.config['unittests_persistance_strategy'] == 'FILE': filename = config.config['unittests_perfdb_filename'] helper = FileBasedPerfRegressionDataPersister(filename) @@ -208,7 +211,7 @@ def check_method_for_perf_regressions(func: Callable) -> Callable: limit = slowest + stdev * 4 logger.debug(f'For {func_name}, max acceptable runtime is {limit:f}s') logger.debug(f'For {func_name}, actual observed runtime was {run_time:f}s') - if run_time > limit and not config.config['unittests_ignore_perf']: + if run_time > limit: msg = f'''{func_id} performance has regressed unacceptably. {slowest:f}s is the slowest runtime on record in {len(hist)} perf samples. It just ran in {run_time:f}s which is 4+ stdevs slower than the slowest. -- 2.46.0