Since this thing is on the innerwebs I suppose it should have a copyright notice.  Also switch the logger calls from f-strings to lazy %-style arguments.
[python_utils.git] / remote_worker.py
index 82b80ea3d722090ab7254eb24eac5884a9520172..8aef1dee1bc94a1bb85a378adab7169810b585ce 100755 (executable)
@@ -1,7 +1,10 @@
 #!/usr/bin/env python3
 
+# © Copyright 2021-2022, Scott Gasch
+
 """A simple utility to unpickle some code, run it, and pickle the
 results.
+
 """
 
 import logging
@@ -59,14 +62,12 @@ def watch_for_cancel(terminate_event: threading.Event) -> None:
         for ancestor in ancestors:
             name = ancestor.name()
             pid = ancestor.pid
-            logger.debug(f'Ancestor process {name} (pid={pid})')
+            logger.debug('Ancestor process %s (pid=%d)', name, pid)
             if 'ssh' in name.lower():
                 saw_sshd = True
                 break
         if not saw_sshd:
-            logger.error(
-                'Did not see sshd in our ancestors list?!  Committing suicide.'
-            )
+            logger.error('Did not see sshd in our ancestors list?!  Committing suicide.')
             os.system('pstree')
             os.kill(os.getpid(), signal.SIGTERM)
             time.sleep(5.0)
@@ -99,43 +100,43 @@ def main() -> None:
     if config.config['watch_for_cancel']:
         (thread, stop_thread) = watch_for_cancel()
 
-    logger.debug(f'Reading {in_file}.')
+    logger.debug('Reading %s.', in_file)
     try:
         with open(in_file, 'rb') as rb:
             serialized = rb.read()
     except Exception as e:
         logger.exception(e)
-        logger.critical(f'Problem reading {in_file}.  Aborting.')
+        logger.critical('Problem reading %s. Aborting.', in_file)
         cleanup_and_exit(thread, stop_thread, 1)
 
-    logger.debug(f'Deserializing {in_file}.')
+    logger.debug('Deserializing %s.', in_file)
     try:
         fun, args, kwargs = cloudpickle.loads(serialized)
     except Exception as e:
         logger.exception(e)
-        logger.critical(f'Problem deserializing {in_file}.  Aborting.')
+        logger.critical('Problem deserializing %s. Aborting.', in_file)
         cleanup_and_exit(thread, stop_thread, 2)
 
     logger.debug('Invoking user code...')
     with Timer() as t:
         ret = fun(*args, **kwargs)
-    logger.debug(f'User code took {t():.1f}s')
+    logger.debug('User code took %.1fs', t())
 
     logger.debug('Serializing results')
     try:
         serialized = cloudpickle.dumps(ret)
     except Exception as e:
         logger.exception(e)
-        logger.critical(f'Could not serialize result ({type(ret)}).  Aborting.')
+        logger.critical('Could not serialize result (%s). Aborting.', type(ret))
         cleanup_and_exit(thread, stop_thread, 3)
 
-    logger.debug(f'Writing {out_file}.')
+    logger.debug('Writing %s.', out_file)
     try:
         with open(out_file, 'wb') as wb:
             wb.write(serialized)
     except Exception as e:
         logger.exception(e)
-        logger.critical(f'Error writing {out_file}.  Aborting.')
+        logger.critical('Error writing %s. Aborting.', out_file)
         cleanup_and_exit(thread, stop_thread, 4)
     cleanup_and_exit(thread, stop_thread, 0)
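
For context, remote_worker.py expects its input file to hold a cloudpickle'd
(function, args, kwargs) tuple and writes the cloudpickle'd return value back
out.  A minimal driver-side sketch (the file names and the example function
are hypothetical illustrations, not part of this repo) might look like:

    import cloudpickle

    def add(a: int, b: int) -> int:
        return a + b

    # Serialize the callable plus its arguments the way the worker reads them.
    with open('work.bin', 'wb') as wb:
        wb.write(cloudpickle.dumps((add, (2, 3), {})))

    # ... remote_worker.py runs elsewhere: it reads the input file, calls the
    # function, and writes the pickled result to its output file ...

    # Deserialize whatever the worker wrote back.
    with open('result.bin', 'rb') as rb:
        result = cloudpickle.loads(rb.read())
    print(result)  # -> 5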