remote_worker.py — from the python_utils repository.
1 #!/usr/bin/env python3
2
3 # © Copyright 2021-2022, Scott Gasch
4
5 """A simple utility to unpickle some code, run it, and pickle the
6 results.
7
8 """
9
10 import logging
11 import os
12 import signal
13 import sys
14 import threading
15 import time
16 from typing import Optional
17
18 import cloudpickle  # type: ignore
19 import psutil  # type: ignore
20
21 import argparse_utils
22 import bootstrap
23 import config
24 from stopwatch import Timer
25 from thread_utils import background_thread
26
# Module-level logger.  Fix: use __name__ rather than __file__ so the
# logger's name follows the module hierarchy (per the logging docs),
# instead of being a filesystem path.
logger = logging.getLogger(__name__)

# Register this tool's command-line arguments with the shared config
# machinery; parsed values are read back in main() via config.config[...].
cfg = config.add_commandline_args(
    f"Remote Worker ({__file__})",
    "Helper to run pickled code remotely and return results",
)
cfg.add_argument(
    '--code_file',
    type=str,
    required=True,
    metavar='FILENAME',
    help='The location of the bundle of code to execute.',
)
cfg.add_argument(
    '--result_file',
    type=str,
    required=True,
    metavar='FILENAME',
    help='The location where we should write the computation results.',
)
cfg.add_argument(
    '--watch_for_cancel',
    action=argparse_utils.ActionNoYes,
    default=True,
    help='Should we watch for the cancellation of our parent ssh process?',
)
53
54
@background_thread
def watch_for_cancel(terminate_event: threading.Event) -> None:
    """Background watcher: commit suicide if our sshd ancestor vanishes.

    This worker is launched over ssh; once no ssh-like process remains
    among our ancestors, the invoking side is gone and our results can
    never be delivered, so we terminate this process.  Polls once per
    second until *terminate_event* is set.
    """
    logger.debug('Starting up background thread...')
    me = psutil.Process(os.getpid())

    def ssh_ancestor_present() -> bool:
        # Walk up the process tree looking for anything ssh-flavored.
        for proc in me.parents():
            pname = proc.name()
            logger.debug('Ancestor process %s (pid=%d)', pname, proc.pid)
            if 'ssh' in pname.lower():
                return True
        return False

    while True:
        if not ssh_ancestor_present():
            logger.error('Did not see sshd in our ancestors list?!  Committing suicide.')
            os.system('pstree')  # leave a snapshot of the process tree in the logs
            os.kill(os.getpid(), signal.SIGTERM)
            time.sleep(5.0)  # grace period before escalating
            os.kill(os.getpid(), signal.SIGKILL)
            sys.exit(-1)
        if terminate_event.is_set():
            return
        time.sleep(1.0)
79
80
def cleanup_and_exit(
    thread: Optional[threading.Thread],
    stop_thread: Optional[threading.Event],
    exit_code: int,
) -> None:
    """Stop the cancellation-watcher thread (if any) and exit the process.

    Args:
        thread: the watcher thread returned by watch_for_cancel, or None.
        stop_thread: the event used to ask that thread to stop, or None.
        exit_code: the process exit status to report.

    Raises:
        SystemExit: always, with *exit_code*.
    """
    if stop_thread is not None:
        stop_thread.set()
        # Robustness fix: the original used `assert thread is not None`,
        # which is stripped under `python -O` and crashes if a caller
        # supplies an event without a thread.  Join only when present.
        if thread is not None:
            thread.join()
    sys.exit(exit_code)
91
92
@bootstrap.initialize
def main() -> None:
    """Unpickle a bundle of code, run it, and pickle the results.

    Reads a cloudpickled (fun, args, kwargs) triple from --code_file,
    invokes fun(*args, **kwargs), and writes the cloudpickled return
    value to --result_file.

    Exit codes: 0 success, 1 read error, 2 deserialize error,
    3 serialize error, 4 write error, 5 user code raised.
    """
    in_file = config.config['code_file']
    out_file = config.config['result_file']

    # Optionally start a watcher that kills this process if our parent
    # sshd disappears (i.e. the remote caller cancelled / went away).
    thread = None
    stop_thread = None
    if config.config['watch_for_cancel']:
        (thread, stop_thread) = watch_for_cancel()

    logger.debug('Reading %s.', in_file)
    try:
        with open(in_file, 'rb') as rb:
            serialized = rb.read()
    except Exception as e:
        logger.exception(e)
        logger.critical('Problem reading %s. Aborting.', in_file)
        cleanup_and_exit(thread, stop_thread, 1)

    logger.debug('Deserializing %s', in_file)
    try:
        fun, args, kwargs = cloudpickle.loads(serialized)
    except Exception as e:
        logger.exception(e)
        logger.critical('Problem deserializing %s. Aborting.', in_file)
        cleanup_and_exit(thread, stop_thread, 2)

    logger.debug('Invoking user code...')
    try:
        with Timer() as t:
            ret = fun(*args, **kwargs)
    except Exception as e:
        # Bug fix: an exception from user code used to propagate without
        # stopping the watcher thread, which could leave the process
        # hanging on a non-daemon thread.  Clean up and report a
        # distinct exit code instead.
        logger.exception(e)
        logger.critical('User code raised. Aborting.')
        cleanup_and_exit(thread, stop_thread, 5)
    logger.debug('User code took %.1fs', t())

    logger.debug('Serializing results')
    try:
        serialized = cloudpickle.dumps(ret)
    except Exception as e:
        logger.exception(e)
        logger.critical('Could not serialize result (%s). Aborting.', type(ret))
        cleanup_and_exit(thread, stop_thread, 3)

    logger.debug('Writing %s', out_file)
    try:
        with open(out_file, 'wb') as wb:
            wb.write(serialized)
    except Exception as e:
        logger.exception(e)
        logger.critical('Error writing %s. Aborting.', out_file)
        cleanup_and_exit(thread, stop_thread, 4)
    cleanup_and_exit(thread, stop_thread, 0)
142
143
# Script entry point; bootstrap.initialize handles config parsing/setup.
if __name__ == '__main__':
    main()