Minor changes; make model trainer show results for all models, etc...
author: Scott Gasch <[email protected]>
Sat, 2 Apr 2022 17:50:55 +0000 (10:50 -0700)
committer: Scott Gasch <[email protected]>
Sat, 2 Apr 2022 17:50:55 +0000 (10:50 -0700)
base_presence.py
ml/model_trainer.py

index f996d548cc354d65f8a0e5d94c6a3b0e6486d89a..5984b416558d8959bb2bc8859bd32de9ad5dc95b 100755 (executable)
@@ -122,7 +122,7 @@ class PresenceDetection(object):
         try:
             raw = cmd(
                 "ssh [email protected] 'cat /home/scott/cron/persisted_mac_addresses.txt'",
-                timeout_seconds=10.0,
+                timeout_seconds=20.0,
             )
             self.parse_raw_macs_file(raw, Location.CABIN)
         except Exception as e:
index e3d89c20421619533da6c8fdcddee739ed33ddff..34ded741a21131b8f7638ebf475374038c3e6101 100644 (file)
@@ -12,6 +12,7 @@ import os
 import pickle
 import random
 import sys
+import time
 import warnings
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
@@ -143,6 +144,7 @@ class TrainingBlueprint(ABC):
             models.append(model)
             modelid_to_params[model.get_id()] = str(params)
 
+        all_models = {}
         best_model = None
         best_score: Optional[np.float64] = None
         best_test_score: Optional[np.float64] = None
@@ -161,6 +163,7 @@ class TrainingBlueprint(ABC):
                     self.y_test,
                 )
                 score = (training_score + test_score * 20) / 21
+                all_models[params] = (score, training_score, test_score)
                 if not self.spec.quiet:
                     print(
                         f"{bold()}{params}{reset()}: "
@@ -177,15 +180,22 @@ class TrainingBlueprint(ABC):
                     if not self.spec.quiet:
                         print(f"New best score {best_score:.2f}% with params {params}")
 
-        if not self.spec.quiet:
-            executors.DefaultExecutors().shutdown()
-            msg = f"Done training; best test set score was: {best_test_score:.1f}%"
-            print(msg)
-            logger.info(msg)
-
+        executors.DefaultExecutors().shutdown()
         assert best_training_score is not None
         assert best_test_score is not None
         assert best_params is not None
+
+        if not self.spec.quiet:
+            time.sleep(1.0)
+            print('Done training...')
+            for params in all_models:
+                msg = f'{bold()}{params}{reset()}: score={all_models[params][0]:.2f}% '
+                msg += f'({all_models[params][2]:.2f}% test, '
+                msg += f'{all_models[params][1]:.2f}% train)'
+                if params == best_params:
+                    msg += f'{bold()} <-- winner{reset()}'
+                print(msg)
+
         (
             scaler_filename,
             model_filename,