Watch for grandparent ssh termination by default.
diff --git a/executors.py b/executors.py
index 6ccd7b675c760315d05158d68dad0768cc0f0871..2829c6957268dd9d607f0db4e4102510b972e38a 100644
--- a/executors.py
+++ b/executors.py
@@ -61,9 +61,8 @@ parser.add_argument(
     help='Maximum number of failures before giving up on a bundle',
 )
 
-RSYNC = 'rsync -q --no-motd -W --ignore-existing --timeout=60 --size-only -z'
-SSH = 'ssh -oForwardX11=no'
-
+SSH = '/usr/bin/ssh -oForwardX11=no'
+SCP = '/usr/bin/scp'
 
 def make_cloud_pickle(fun, *args, **kwargs):
     logger.debug(f"Making cloudpickled bundle at {fun.__name__}")
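For context, here is a minimal sketch (not part of this commit) of how a copy command built from the new absolute-path SCP constant might be executed. run_silently is only referenced, not shown, in this diff, so the subprocess call and the copy_code_to_worker helper name below are assumptions for illustration. Note that unlike the removed rsync invocation, scp has no equivalent of --ignore-existing, so the file is re-copied even if it already exists on the destination.

import shlex
import subprocess

SSH = '/usr/bin/ssh -oForwardX11=no'
SCP = '/usr/bin/scp'

def copy_code_to_worker(code_file: str, username: str, machine: str) -> None:
    # Build the same style of command the executor uses and run it without
    # echoing output, roughly what run_silently is assumed to do.
    cmd = f'{SCP} {code_file} {username}@{machine}:{code_file}'
    subprocess.run(
        shlex.split(cmd),
        check=True,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
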
@@ -741,7 +740,7 @@ class RemoteExecutor(BaseExecutor):
         # Send input code / data to worker machine if it's not local.
         if hostname not in machine:
             try:
-                cmd = f'{RSYNC} {bundle.code_file} {username}@{machine}:{bundle.code_file}'
+                cmd = f'{SCP} {bundle.code_file} {username}@{machine}:{bundle.code_file}'
                 start_ts = time.time()
                 logger.info(f"{bundle}: Copying work to {worker} via {cmd}.")
                 run_silently(cmd)
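The hunk above sits inside a try block that records start_ts before the transfer; a hedged sketch of that timing-and-failure pattern follows, with timed_copy and the logger setup as illustrative assumptions rather than code from this repository.

import logging
import subprocess
import time

logger = logging.getLogger(__name__)

def timed_copy(cmd: str) -> float:
    # Time a copy command and surface failures to the caller, mirroring the
    # try/except plus start_ts pattern visible in the hunk above.
    start_ts = time.time()
    try:
        subprocess.run(
            cmd,
            shell=True,
            check=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
    except subprocess.CalledProcessError:
        logger.exception('Copy failed: %s', cmd)
        raise
    return time.time() - start_ts
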
@@ -875,7 +874,7 @@ class RemoteExecutor(BaseExecutor):
             if not was_cancelled:
                 assert bundle.machine is not None
                 if bundle.hostname not in bundle.machine:
-                    cmd = f'{RSYNC} {username}@{machine}:{result_file} {result_file} 2>/dev/null'
+                    cmd = f'{SCP} {username}@{machine}:{result_file} {result_file} 2>/dev/null'
                     logger.info(
                         f"{bundle}: Fetching results from {username}@{machine} via {cmd}"
                     )
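Finally, a sketch of the result fetch in the opposite direction. The 2>/dev/null redirect in the hunk above implies the command is run through a shell, so shell=True is used here; fetch_results is a hypothetical name, not a function from this repository.

import subprocess

SCP = '/usr/bin/scp'

def fetch_results(username: str, machine: str, result_file: str) -> bool:
    # Pull the remote result file back to the same local path; stderr is
    # discarded, matching the 2>/dev/null in the command above.
    cmd = f'{SCP} {username}@{machine}:{result_file} {result_file} 2>/dev/null'
    proc = subprocess.run(cmd, shell=True)
    return proc.returncode == 0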