Used isort to sort imports; also added isort to the git pre-commit hook.
[python_utils.git] / remote_worker.py
1 #!/usr/bin/env python3
2
3 """A simple utility to unpickle some code, run it, and pickle the
4 results.
5 """
6
7 import logging
8 import os
9 import signal
10 import sys
11 import threading
12 import time
13 from typing import Optional
14
15 import cloudpickle  # type: ignore
16 import psutil  # type: ignore
17
18 import argparse_utils
19 import bootstrap
20 import config
21 from stopwatch import Timer
22 from thread_utils import background_thread
23
# NOTE(review): logger is keyed on __file__ rather than the conventional
# __name__ -- presumably a repo-wide style choice; confirm before changing.
logger = logging.getLogger(__file__)

# Register this module's command-line flags with the shared config system
# at import time; config.config is populated later by bootstrap.initialize.
cfg = config.add_commandline_args(
    f"Remote Worker ({__file__})",
    "Helper to run pickled code remotely and return results",
)
# Input: the cloudpickle bundle containing (function, args, kwargs) to run.
cfg.add_argument(
    '--code_file',
    type=str,
    required=True,
    metavar='FILENAME',
    help='The location of the bundle of code to execute.',
)
# Output: where the cloudpickled return value of the function is written.
cfg.add_argument(
    '--result_file',
    type=str,
    required=True,
    metavar='FILENAME',
    help='The location where we should write the computation results.',
)
# Yes/no flag (argparse_utils.ActionNoYes); on by default.  Controls the
# watchdog thread that self-terminates when the ssh ancestor disappears.
cfg.add_argument(
    '--watch_for_cancel',
    action=argparse_utils.ActionNoYes,
    default=True,
    help='Should we watch for the cancellation of our parent ssh process?',
)
50
51
@background_thread
def watch_for_cancel(terminate_event: threading.Event) -> None:
    """Watchdog: self-terminate if our controlling ssh process vanishes.

    Polls once per second.  As long as some ancestor process has 'ssh' in
    its name we assume the remote invoker is still attached.  If no such
    ancestor is found (i.e. the ssh session was cancelled and we were
    reparented), dump the process tree and kill this process: SIGTERM
    first, then SIGKILL five seconds later as a backstop.

    Args:
        terminate_event: set by the main thread (via cleanup_and_exit) to
            ask this watchdog to exit cleanly.
    """
    logger.debug('Starting up background thread...')
    p = psutil.Process(os.getpid())
    while True:
        saw_sshd = False
        for ancestor in p.parents():
            try:
                name = ancestor.name()
                pid = ancestor.pid
            except psutil.Error:
                # The ancestor exited between parents() and name().
                # Without this guard the raised NoSuchProcess would kill
                # this thread and silently disable cancellation watching.
                continue
            # Lazy %-args: this runs every second, don't format eagerly.
            logger.debug('Ancestor process %s (pid=%s)', name, pid)
            if 'ssh' in name.lower():
                saw_sshd = True
                break
        if not saw_sshd:
            logger.error(
                'Did not see sshd in our ancestors list?!  Committing suicide.'
            )
            os.system('pstree')
            os.kill(os.getpid(), signal.SIGTERM)
            time.sleep(5.0)  # grace period for SIGTERM handlers to run
            os.kill(os.getpid(), signal.SIGKILL)
            sys.exit(-1)
        if terminate_event.is_set():
            return
        time.sleep(1.0)
78
79
def cleanup_and_exit(
    thread: Optional[threading.Thread],
    stop_thread: Optional[threading.Event],
    exit_code: int,
) -> None:
    """Shut down the watchdog thread (if one exists) and exit the process.

    Args:
        thread: the watchdog thread returned by watch_for_cancel, or None.
        stop_thread: the event that tells the watchdog to stop, or None.
        exit_code: process exit status passed to sys.exit.

    Raises:
        SystemExit: always; this function never returns.
    """
    if stop_thread is None:
        # No watchdog was started; nothing to tear down.
        sys.exit(exit_code)
    stop_thread.set()
    assert thread is not None
    thread.join()
    sys.exit(exit_code)
90
91
@bootstrap.initialize
def main() -> None:
    """Unpickle a code bundle, run it, and pickle the result to a file.

    Reads a cloudpickle-serialized (function, args, kwargs) triple from
    --code_file, invokes it, and writes the cloudpickled return value to
    --result_file.  Optionally runs a watchdog thread that kills this
    process if its parent ssh session disappears.

    Exit codes:
        0 success, 1 read failure, 2 deserialize failure,
        3 result-serialization failure, 4 write failure,
        5 user code raised an exception.
    """
    in_file = config.config['code_file']
    out_file = config.config['result_file']

    # Start the ssh-cancellation watchdog unless disabled on the cmdline.
    thread = None
    stop_thread = None
    if config.config['watch_for_cancel']:
        (thread, stop_thread) = watch_for_cancel()

    logger.debug(f'Reading {in_file}.')
    try:
        with open(in_file, 'rb') as rb:
            serialized = rb.read()
    except Exception as e:
        logger.exception(e)
        logger.critical(f'Problem reading {in_file}.  Aborting.')
        cleanup_and_exit(thread, stop_thread, 1)

    logger.debug(f'Deserializing {in_file}.')
    try:
        fun, args, kwargs = cloudpickle.loads(serialized)
    except Exception as e:
        logger.exception(e)
        logger.critical(f'Problem deserializing {in_file}.  Aborting.')
        cleanup_and_exit(thread, stop_thread, 2)

    logger.debug('Invoking user code...')
    try:
        with Timer() as t:
            ret = fun(*args, **kwargs)
    except Exception as e:
        # Every other step routes failures through cleanup_and_exit;
        # without this, an exception in user code would propagate past
        # the watchdog shutdown and leave the background thread running.
        logger.exception(e)
        logger.critical('User code raised an exception.  Aborting.')
        cleanup_and_exit(thread, stop_thread, 5)
    logger.debug(f'User code took {t():.1f}s')

    logger.debug('Serializing results')
    try:
        serialized = cloudpickle.dumps(ret)
    except Exception as e:
        logger.exception(e)
        logger.critical(f'Could not serialize result ({type(ret)}).  Aborting.')
        cleanup_and_exit(thread, stop_thread, 3)

    logger.debug(f'Writing {out_file}.')
    try:
        with open(out_file, 'wb') as wb:
            wb.write(serialized)
    except Exception as e:
        logger.exception(e)
        logger.critical(f'Error writing {out_file}.  Aborting.')
        cleanup_and_exit(thread, stop_thread, 4)
    cleanup_and_exit(thread, stop_thread, 0)
141
142
# Script entry point; @bootstrap.initialize presumably parses the command
# line and populates config.config before main's body runs -- verify in
# the bootstrap module.
if __name__ == '__main__':
    main()