# Source: [arvados.git] / crunch_scripts / run-command
1 #!/usr/bin/env python
2
3 import arvados
4 import re
5 import os
6 import subprocess
7 import sys
8 import shutil
9 import subst
10 import time
11 import arvados.commands.put as put
12 import signal
13 import stat
14 import copy
15 import traceback
16 import pprint
17
# Create files group/world-inaccessible by default (Python 2 octal literal).
os.umask(0077)

# Per-task scratch directory assigned by crunch.
t = arvados.current_task().tmpdir

api = arvados.api('v1')

# Work inside the task tmpdir: "tmpdir" holds scratch files, "output" holds
# everything that will be uploaded to Keep when the command finishes.
os.chdir(arvados.current_task().tmpdir)
os.mkdir("tmpdir")
os.mkdir("output")

os.chdir("output")

taskp = None  # NOTE(review): assigned here but never read again.
# Job-wide script parameters; per-task parameters (set when the job fans
# out via "task.foreach") take precedence below.
jobp = arvados.current_job()['script_parameters']
if len(arvados.current_task()['parameters']) > 0:
    # NOTE(review): if this condition is false on a "task.foreach" child,
    # `p` is never bound and later code raises NameError -- confirm child
    # tasks always carry non-empty parameters.
    p = arvados.current_task()['parameters']

# Symlinks created by $(link ...) substitutions; removed after the command
# runs so they are not uploaded with the output.
links = []
36
def sub_link(v):
    """Symlink keep-mounted path *v* into the current (output) directory.

    Records the link name in the module-level `links` list so it can be
    removed before the output is uploaded.  Returns the link's basename.
    """
    name = os.path.basename(v)
    target = os.path.join(os.environ['TASK_KEEPMOUNT'], v)
    os.symlink(target, name)
    links.append(name)
    return name
42
def sub_tmpdir(v):
    """Return the task's scratch subdirectory (the argument is ignored)."""
    task = arvados.current_task()
    return os.path.join(task.tmpdir, 'tmpdir')
45
def sub_cores(v):
    """Substitute the number of CPU slots on this node (from crunch's env)."""
    return os.environ["CRUNCH_NODE_SLOTS"]
48
def sub_jobid(v):
    """Substitute the current job's UUID (from crunch's environment)."""
    return os.environ["JOB_UUID"]
51
def sub_taskid(v):
    """Substitute the current task's UUID (from crunch's environment)."""
    return os.environ["TASK_UUID"]
54
# Register the handlers for $(...) substitutions inside command parameters
# (dispatched by the subst module's do_substitution).
subst.default_subs["link "] = sub_link
subst.default_subs["task.tmpdir"] = sub_tmpdir
subst.default_subs["node.cores"] = sub_cores
subst.default_subs["job.id"] = sub_jobid
subst.default_subs["task.id"] = sub_taskid

# Exit code of the child command; stays 1 if the command never runs.
rcode = 1
62
def machine_progress(bytes_written, bytes_expected):
    """Format a machine-readable upload progress line.

    An unknown expected total (None) is reported as -1.
    """
    total = -1 if bytes_expected is None else bytes_expected
    return "run-command: wrote {} total {}\n".format(bytes_written, total)
66
class SigHandler(object):
    """Forwards a signal to a child process and remembers it was sent.

    `sig` is None until a signal has been forwarded; the main script checks
    it after the child exits to tell "killed by forwarded signal" apart
    from a normal exit.
    """

    def __init__(self):
        self.sig = None

    def send_signal(self, sp, signum):
        # Forward first, then record; the recorded value is only meaningful
        # once the child has actually been signalled.
        sp.send_signal(signum)
        self.sig = signum
74
def expand_item(p, c):
    """Expand one element of a command list into a list of argument strings.

    Strings go through $(...) substitution; lists expand recursively; a
    dict with both "foreach" and "command" keys expands its command once
    per item of the parameter named by "foreach".  Any other value expands
    to nothing.
    """
    if isinstance(c, str):
        return [subst.do_substitution(p, c)]
    if isinstance(c, list):
        return expand_list(p, c)
    if isinstance(c, dict) and "foreach" in c and "command" in c:
        var = c["foreach"]
        expanded = []
        for item in get_items(p, p[var]):
            sub_params = copy.copy(p)
            sub_params[var] = item
            expanded.extend(expand_list(sub_params, c["command"]))
        return expanded
    return []
92
def expand_list(p, l):
    """Expand every element of `l` with expand_item and concatenate."""
    expanded = []
    for arg in l:
        expanded.extend(expand_item(p, arg))
    return expanded
95
def get_items(p, value):
    """Resolve a "task.foreach"-style value into a list of items.

    A list value is expanded element-by-element via expand_list.  Any other
    value is substituted into a path under the Keep mount: a directory
    yields one "$(dir ...)" entry per directory entry, and a regular file
    yields one item per line (trailing newlines preserved, matching the
    original behavior).  Returns None for any other file type.

    Raises OSError if the substituted path does not exist.
    """
    if isinstance(value, list):
        return expand_list(p, value)

    fn = subst.do_substitution(p, value)
    mode = os.stat(fn).st_mode
    # Path relative to the Keep mount root, used to build $(dir ...) items.
    prefix = fn[len(os.environ['TASK_KEEPMOUNT'])+1:]
    if stat.S_ISDIR(mode):
        return ["$(dir %s/%s/)" % (prefix, l) for l in os.listdir(fn)]
    elif stat.S_ISREG(mode):
        with open(fn) as f:
            return [line for line in f]
    # Fix: the original compared `mode != None` (always true, since os.stat
    # raises instead of returning None) and then referenced an unbound
    # `items` for special files (fifos, sockets, ...), raising NameError.
    # Treat such paths as "no items".
    return None
112
# Name and open file object for captured stdout, when "save.stdout" is used.
stdoutname = None
stdoutfile = None

try:
    if "task.foreach" in jobp:
        if arvados.current_task()['sequence'] == 0:
            # Parent task (sequence 0): create one child task (sequence 1)
            # per item of the parameter named by "task.foreach", then exit
            # without running the command itself.
            var = jobp["task.foreach"]
            items = get_items(jobp, jobp[var])
            print("run-command: parallelizing on %s with items %s" % (var, items))
            if items != None:
                for i in items:
                    params = copy.copy(jobp)
                    params[var] = i
                    arvados.api().job_tasks().create(body={
                        'job_uuid': arvados.current_job()['uuid'],
                        'created_by_job_task_uuid': arvados.current_task()['uuid'],
                        'sequence': 1,
                        'parameters': params
                        }
                    ).execute()
                # The parent task itself produces no output.
                arvados.current_task().set_output(None)
                sys.exit(0)
            else:
                # The foreach target was not a list, directory, or file.
                sys.exit(1)
    else:
        # No fan-out: run the command with the job-level parameters.
        p = jobp

    # Build the child argv by expanding the "command" parameter.
    cmd = expand_list(p, p["command"])

    if "save.stdout" in p:
        # Redirect the child's stdout into a file in the output directory.
        stdoutname = subst.do_substitution(p, p["save.stdout"])
        stdoutfile = open(stdoutname, "wb")

    print("run-command: {}{}".format(' '.join(cmd), (" > " + stdoutname) if stdoutname != None else ""))

except Exception as e:
    print("run-command: caught exception:")
    traceback.print_exc(file=sys.stdout)
    print("run-command: task parameters was:")
    # NOTE(review): `p` may itself be unbound here if the exception fired
    # before it was assigned (foreach child with empty parameters).
    pprint.pprint(p)
    sys.exit(1)
154
# Run the command; on failure to even start it, rcode keeps its initial 1.
try:
    sp = subprocess.Popen(cmd, shell=False, stdout=stdoutfile)
    sig = SigHandler()

    # forward signals to the process.
    signal.signal(signal.SIGINT, lambda signum, frame: sig.send_signal(sp, signum))
    signal.signal(signal.SIGTERM, lambda signum, frame: sig.send_signal(sp, signum))
    signal.signal(signal.SIGQUIT, lambda signum, frame: sig.send_signal(sp, signum))

    # wait for process to complete.
    rcode = sp.wait()

    if sig.sig != None:
        # We forwarded a signal to the child: treat this run as terminated
        # rather than completed, regardless of the child's exit code.
        print("run-command: terminating on signal %s" % sig.sig)
        sys.exit(2)
    else:
        print("run-command: completed with exit code %i (%s)" % (rcode, "success" if rcode == 0 else "failed"))

except Exception as e:
    # Log and fall through: the output directory is still uploaded below.
    print("run-command: caught exception:")
    traceback.print_exc(file=sys.stdout)
176
# restore default signal handlers.
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
signal.signal(signal.SIGQUIT, signal.SIG_DFL)

# Remove the symlinks created by $(link ...) so they are not uploaded.
for l in links:
    os.unlink(l)

print("run-command: the following output files will be saved to keep:")

# List the output files (size and path) into the job log via find(1).
subprocess.call(["find", ".", "-type", "f", "-printf", "run-command: %12.12s %h/%f\\n"])
188
print("run-command: start writing output to keep")

# Upload the output directory to Keep, retrying forever on transient
# errors; progress is checkpointed so a retry resumes where it left off.
done = False
resume_cache = put.ResumeCache(os.path.join(arvados.current_task().tmpdir, "upload-output-checkpoint"))
reporter = put.progress_writer(machine_progress)
bytes_expected = put.expected_bytes_for(".")
while not done:
    try:
        out = put.ArvPutCollectionWriter.from_cache(resume_cache, reporter, bytes_expected)
        out.do_queued_work()
        out.write_directory_tree(".", max_manifest_depth=0)
        outuuid = out.finish()
        # Record the output collection and final status on the task.
        api.job_tasks().update(uuid=arvados.current_task()['uuid'],
                                             body={
                                                 'output':outuuid,
                                                 'success': (rcode == 0),
                                                 'progress':1.0
                                             }).execute()
        done = True
    except KeyboardInterrupt:
        print("run-command: terminating on signal 2")
        sys.exit(2)
    except Exception as e:
        # Transient failure: log, wait, retry the upload from checkpoint.
        print("run-command: caught exception:")
        traceback.print_exc(file=sys.stdout)
        time.sleep(5)

# Propagate the child command's exit code as the task's exit code.
sys.exit(rcode)