import logging

logger = logging.getLogger('run-command')
log_handler = logging.StreamHandler()
log_handler.setFormatter(logging.Formatter("run-command: %(message)s"))
logger.addHandler(log_handler)
logger.setLevel(logging.INFO)

import arvados
import re
import os
import sys
import stat
import copy
import json
import signal
import pprint
import argparse
import subprocess
import multiprocessing
import crunchutil.subst as subst
import arvados.commands.put as put
import crunchutil.robust_put as robust_put
import crunchutil.vwd as vwd

parser = argparse.ArgumentParser()
parser.add_argument('--dry-run', action='store_true')
parser.add_argument('--script-parameters', type=str, default="{}")
args = parser.parse_args()
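
# With --dry-run, commands are expanded from --script-parameters and logged,
# but nothing is executed and the Arvados API is never contacted.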

if not args.dry_run:
    api = arvados.api('v1')
    t = arvados.current_task().tmpdir
    os.chdir(arvados.current_task().tmpdir)
    os.mkdir("tmpdir")
    os.mkdir("output")

    os.chdir("output")

    outdir = os.getcwd()

    taskp = None
    jobp = arvados.current_job()['script_parameters']
    if len(arvados.current_task()['parameters']) > 0:
        taskp = arvados.current_task()['parameters']
else:
    outdir = "/tmp"
    jobp = json.loads(args.script_parameters)
    os.environ['JOB_UUID'] = 'zzzzz-8i9sb-1234567890abcde'
    os.environ['TASK_UUID'] = 'zzzzz-ot0gb-1234567890abcde'
    os.environ['CRUNCH_SRC'] = '/tmp/crunch-src'
    if 'TASK_KEEPMOUNT' not in os.environ:
        os.environ['TASK_KEEPMOUNT'] = '/keep'
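
# The placeholder UUIDs, source path, and keep mount above let substitutions
# such as $(job.uuid) resolve during a local dry run.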

def sub_tmpdir(v):
    return os.path.join(arvados.current_task().tmpdir, 'tmpdir')

def sub_outdir(v):
    return outdir

def sub_cores(v):
    return str(multiprocessing.cpu_count())

def sub_jobid(v):
    return os.environ['JOB_UUID']

def sub_taskid(v):
    return os.environ['TASK_UUID']

def sub_jobsrc(v):
    return os.environ['CRUNCH_SRC']

subst.default_subs["task.tmpdir"] = sub_tmpdir
subst.default_subs["task.outdir"] = sub_outdir
subst.default_subs["job.srcdir"] = sub_jobsrc
subst.default_subs["node.cores"] = sub_cores
subst.default_subs["job.uuid"] = sub_jobid
subst.default_subs["task.uuid"] = sub_taskid
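
# These hooks let script parameters embed substitutions such as
# "$(task.outdir)" or "$(node.cores)", which crunchutil.subst resolves
# when commands are expanded.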

class SigHandler(object):
    def __init__(self):
        self.sig = None

    def send_signal(self, sp, signum):
        sp.send_signal(signum)
        self.sig = signum
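
# Recording the forwarded signal in self.sig lets the main loop distinguish
# a signal-triggered termination from a normal exit.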

def add_to_group(gr, match):
    m = match.groups()
    if m not in gr:
        gr[m] = []
    gr[m].append(match.group(0))

def expand_item(p, c):
    if isinstance(c, dict):
        if "foreach" in c and "command" in c:
            # Expand "command" once per item of the "foreach" variable.
            var = c["foreach"]
            items = get_items(p, p[var])
            r = []
            for i in items:
                params = copy.copy(p)
                params[var] = i
                r.extend(expand_list(params, c["command"]))
            return r
        if "list" in c and "index" in c and "command" in c:
            # Expand "command" with the list variable bound to a single item.
            var = c["list"]
            items = get_items(p, p[var])
            params = copy.copy(p)
            params[var] = items[int(c["index"])]
            return expand_list(params, c["command"])
        if "regex" in c:
            pattern = re.compile(c["regex"])
            if "filter" in c:
                # Keep only the items matching the regex.
                items = get_items(p, p[c["filter"]])
                return [i for i in items if pattern.match(i)]
            elif "group" in c:
                # Group items by the values of the regex capture groups.
                groups = {}
                items = get_items(p, p[c["group"]])
                for i in items:
                    match = pattern.match(i)
                    if match:
                        add_to_group(groups, match)
                return [groups[k] for k in groups]
            elif "extract" in c:
                # Extract the regex capture groups from each matching item.
                items = get_items(p, p[c["extract"]])
                r = []
                for i in items:
                    match = pattern.match(i)
                    if match:
                        r.append(list(match.groups()))
                return r
    elif isinstance(c, list):
        return expand_list(p, c)
    elif isinstance(c, basestring):
        return [subst.do_substitution(p, c)]

    return []
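
# Example (hypothetical values): with p["sample"] = ["a", "b"],
#   expand_item(p, {"foreach": "sample", "command": ["process", "$(sample)"]})
# returns ["process", "a", "process", "b"].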

def expand_list(p, l):
    if isinstance(l, basestring):
        return expand_item(p, l)
    else:
        return [exp for arg in l for exp in expand_item(p, arg)]
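
# get_items: a string value names a path (typically under $TASK_KEEPMOUNT);
# a directory yields its entries, a regular file yields its lines.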

def get_items(p, value):
    if isinstance(value, dict):
        return expand_item(p, value)

    if isinstance(value, list):
        return expand_list(p, value)

    fn = subst.do_substitution(p, value)
    mode = os.stat(fn).st_mode
    # path relative to the root of the keep mount
    prefix = fn[len(os.environ['TASK_KEEPMOUNT'])+1:]
    if stat.S_ISDIR(mode):
        items = [os.path.join(fn, l) for l in os.listdir(fn)]
    elif stat.S_ISREG(mode):
        with open(fn) as f:
            items = [line.rstrip("\r\n") for line in f]
    else:
        items = None
    return items

# stdio targets and exit code, filled in as the task runs
stdoutname = None
stdoutfile = None
stdinname = None
stdinfile = None
rcode = 1

def recursive_foreach(params, fvars):
    var = fvars[0]
    fvars = fvars[1:]
    items = get_items(params, params[var])
    logger.info("parallelizing on %s with items %s" % (var, items))
    if items is not None:
        for i in items:
            params = copy.copy(params)
            params[var] = i
            if len(fvars) > 0:
                # More foreach variables remain; recurse on the rest.
                recursive_foreach(params, fvars)
            else:
                if not args.dry_run:
                    arvados.api().job_tasks().create(body={
                        'job_uuid': arvados.current_job()['uuid'],
                        'created_by_job_task_uuid': arvados.current_task()['uuid'],
                        'sequence': 1,
                        'parameters': params
                    }).execute()
                else:
                    logger.info(expand_list(params, params["command"]))
    else:
        logger.error("parameter %s with value %s in task.foreach yielded no items" % (var, params[var]))
        sys.exit(1)
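
# Example (hypothetical parameters): with jobp = {"x": ["1", "2"],
# "command": ["echo", "$(x)"], "task.foreach": "x"}, recursive_foreach
# queues one new task per item of x, each with x bound to a single value.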

try:
    if "task.foreach" in jobp:
        if args.dry_run or arvados.current_task()['sequence'] == 0:
            # This is the first task; start the other tasks and exit.
            fvars = jobp["task.foreach"]
            if isinstance(fvars, basestring):
                fvars = [fvars]
            if not isinstance(fvars, list) or len(fvars) == 0:
                logger.error("value of task.foreach must be a string or non-empty list")
                sys.exit(1)
            recursive_foreach(jobp, jobp["task.foreach"])
            if not args.dry_run:
                if "task.vwd" in jobp:
                    # Set output of the first task to the base vwd collection so it
                    # will be merged with output fragments from the other tasks by
                    # crunch.
                    arvados.current_task().set_output(subst.do_substitution(jobp, jobp["task.vwd"]))
                else:
                    arvados.current_task().set_output(None)
            sys.exit(0)
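            # Subtasks are created with sequence 1, so only the original
            # task (sequence 0) takes this scheduling branch.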
    else:
        # This is the only task, so taskp and jobp are the same.
        taskp = jobp

    if not args.dry_run:
        if "task.vwd" in taskp:
            # Populate output directory with symlinks to files in collection
            vwd.checkout(subst.do_substitution(taskp, taskp["task.vwd"]), outdir)

        if "task.cwd" in taskp:
            os.chdir(subst.do_substitution(taskp, taskp["task.cwd"]))

    if "piped_commands" in taskp:
        # Each entry is one stage of a shell-style pipeline.
        cmd = []
        for c in taskp["piped_commands"]:
            cmd.append(expand_list(taskp, c))
    else:
        cmd = [expand_list(taskp, taskp["command"])]

    if "task.stdin" in taskp:
        stdinname = subst.do_substitution(taskp, taskp["task.stdin"])
        stdinfile = open(stdinname, "rb")

    if "task.stdout" in taskp:
        stdoutname = subst.do_substitution(taskp, taskp["task.stdout"])
        stdoutfile = open(stdoutname, "wb")

    logger.info("{}{}{}".format(' | '.join(' '.join(c) for c in cmd),
                                (" < " + stdinname) if stdinname is not None else "",
                                (" > " + stdoutname) if stdoutname is not None else ""))

    if args.dry_run:
        sys.exit(0)
except subst.SubstitutionError as e:
    logger.error(str(e))
    logger.error("task parameters were:")
    logger.error(pprint.pformat(taskp))
    sys.exit(1)
except Exception as e:
    logger.exception("caught exception")
    logger.error("task parameters were:")
    logger.error(pprint.pformat(taskp))
    sys.exit(1)

try:
    subprocesses = []
    next_cmd_stdin = stdinfile
    for i in xrange(len(cmd)):
        if i == len(cmd) - 1:
            # Last stage of the pipeline writes to task.stdout (or inherits ours).
            next_cmd_stdout = stdoutfile
        else:
            # Intermediate stages write to a pipe read by the next stage.
            next_cmd_stdout = subprocess.PIPE
        sp = subprocess.Popen(cmd[i], shell=False, stdin=next_cmd_stdin, stdout=next_cmd_stdout)
        next_cmd_stdin = sp.stdout

        sig = SigHandler()

        # forward signals to the process.
        signal.signal(signal.SIGINT, lambda signum, frame: sig.send_signal(sp, signum))
        signal.signal(signal.SIGTERM, lambda signum, frame: sig.send_signal(sp, signum))
        signal.signal(signal.SIGQUIT, lambda signum, frame: sig.send_signal(sp, signum))

        subprocesses.append(sp)
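
    # The signal handlers close over sp, so signals are forwarded to the
    # most recently started stage, i.e. the last command in the pipeline.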

    # wait for the last process in the pipeline to complete.
    rcode = sp.wait()

    if sig.sig is not None:
        logger.critical("terminating on signal %s" % sig.sig)
        sys.exit(2)
    else:
        logger.info("completed with exit code %i (%s)" % (rcode, "success" if rcode == 0 else "failed"))

except Exception as e:
    logger.exception("caught exception")

# restore default signal handlers.
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
signal.signal(signal.SIGQUIT, signal.SIG_DFL)

logger.info("the following output files will be saved to keep:")

subprocess.call(["find", ".", "-type", "f", "-printf", "run-command: %12.12s %h/%f\\n"], stdout=sys.stderr)

logger.info("start writing output to keep")
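
# Three upload paths: a vwd subtask checks in only its own output fragment
# (merge=False), a single vwd task merges with the base collection
# (merge=True), and without a vwd the whole output directory is uploaded
# as a new collection.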

if "task.vwd" in taskp:
    if "task.foreach" in jobp:
        # This is a subtask, so don't merge with the original collection;
        # that will happen at the end.
        outcollection = vwd.checkin(subst.do_substitution(taskp, taskp["task.vwd"]), outdir, merge=False).manifest_text()
    else:
        # Just a single task, so do merge with the original collection.
        outcollection = vwd.checkin(subst.do_substitution(taskp, taskp["task.vwd"]), outdir, merge=True).manifest_text()
else:
    outcollection = robust_put.upload(outdir, logger)

api.job_tasks().update(uuid=arvados.current_task()['uuid'],
                       body={
                           'output': outcollection,
                           'success': (rcode == 0),
                           'progress': 1.0
                       }).execute()
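
# Example of a local dry run (hypothetical invocation and parameters):
#   python run-command --dry-run \
#       --script-parameters '{"command": ["echo", "hello"]}'
# This expands and logs the command without creating tasks or running it.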