# Implement the cwl-runner interface for submitting and running work on
# Arvados, using either the Crunch jobs API or the Crunch containers API.
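#
# Example command line (illustrative; assumes the installed entry point is on
# PATH and ARVADOS_API_HOST/ARVADOS_API_TOKEN are set in the environment):
#
#   arvados-cwl-runner --api=containers workflow.cwl job-input.yml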
import argparse
import copy
import json
import logging
import os
import sys
import threading

from functools import partial
import pkg_resources  # part of setuptools

import arvados
import arvados.collection
import arvados.errors
import arvados.keep
import schema_salad.schema

import cwltool.main
import cwltool.process
import cwltool.workflow
from cwltool.errors import WorkflowException
from cwltool.pack import pack
from cwltool.process import shortname, UnsupportedRequirement, getListing
from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
from cwltool.draft2tool import compute_checksums

from arvados.api import OrderedJsonModel

from .arvcontainer import ArvadosContainer, RunnerContainer
from .arvjob import ArvadosJob, RunnerJob, RunnerTemplate
from .runner import Runner, upload_instance
from .arvtool import ArvadosCommandTool
from .arvworkflow import ArvadosWorkflow, upload_workflow
from .fsaccess import CollectionFsAccess
from .perf import Perf
from .pathmapper import FinalOutputPathMapper
from ._version import __version__

logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')
logger.setLevel(logging.INFO)


class ArvCwlRunner(object):
    """Execute a CWL tool or workflow, submit work (using either the jobs or
    containers API), wait for it to complete, and report output.

    """
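    # Minimal programmatic use (sketch; mirrors how main() below wires this
    # class into cwltool, and assumes Arvados credentials are configured):
    #
    #   api = arvados.api('v1', model=OrderedJsonModel())
    #   runner = ArvCwlRunner(api, work_api="containers")
    #   # cwltool.main.main() then drives runner.arv_executor and
    #   # runner.arv_make_tool; see main() at the bottom of this file.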

    def __init__(self, api_client, work_api=None, keep_client=None, output_name=None, output_tags=None):
        self.api = api_client
        self.processes = {}
        self.lock = threading.Lock()
        self.cond = threading.Condition(self.lock)
        self.final_output = None
        self.final_status = None
        self.uploaded = {}
        self.num_retries = 4
        self.uuid = None
        self.stop_polling = threading.Event()
        self.poll_api = None
        self.pipeline = None
        self.final_output_collection = None
        self.output_name = output_name
        self.output_tags = output_tags
        self.project_uuid = None

        if keep_client is not None:
            self.keep_client = keep_client
        else:
            self.keep_client = arvados.keep.KeepClient(api_client=self.api, num_retries=self.num_retries)

        # Probe the API server's discovery document for the work APIs it
        # supports, and pick one consistent with the caller's request.
        self.work_api = None
        for api in ["jobs", "containers"]:
            try:
                methods = self.api._rootDesc.get('resources')[api]['methods']
                if ('httpMethod' in methods['create'] and
                    (work_api == api or work_api is None)):
                    self.work_api = api
                    break
            except KeyError:
                pass
        if not self.work_api:
            if work_api is None:
                raise Exception("No supported APIs")
            else:
                raise Exception("Unsupported API '%s'" % work_api)

    def arv_make_tool(self, toolpath_object, **kwargs):
        kwargs["work_api"] = self.work_api
        if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
            return ArvadosCommandTool(self, toolpath_object, **kwargs)
        elif "class" in toolpath_object and toolpath_object["class"] == "Workflow":
            return ArvadosWorkflow(self, toolpath_object, **kwargs)
        else:
            return cwltool.workflow.defaultMakeTool(toolpath_object, **kwargs)

    def output_callback(self, out, processStatus):
        if processStatus == "success":
            logger.info("Overall process status is %s", processStatus)
            if self.pipeline:
                self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
                                                     body={"state": "Complete"}).execute(num_retries=self.num_retries)
        else:
            logger.warn("Overall process status is %s", processStatus)
            if self.pipeline:
                self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
                                                     body={"state": "Failed"}).execute(num_retries=self.num_retries)
        self.final_status = processStatus
        self.final_output = out

    def on_message(self, event):
        # Handle state-change events for jobs/containers we are tracking,
        # whether they arrive over the websocket or from poll_states().
        if "object_uuid" in event:
            if event["object_uuid"] in self.processes and event["event_type"] == "update":
                if event["properties"]["new_attributes"]["state"] == "Running" and self.processes[event["object_uuid"]].running is False:
                    uuid = event["object_uuid"]
                    with self.lock:
                        j = self.processes[uuid]
                        logger.info("Job %s (%s) is Running", j.name, uuid)
                        j.running = True
                        j.update_pipeline_component(event["properties"]["new_attributes"])
                elif event["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled"):
                    uuid = event["object_uuid"]
                    try:
                        self.cond.acquire()
                        j = self.processes[uuid]
                        # "jobs" -> "Job", "containers" -> "Container"
                        txt = self.work_api[0].upper() + self.work_api[1:-1]
                        logger.info("%s %s (%s) is %s", txt, j.name, uuid, event["properties"]["new_attributes"]["state"])
                        with Perf(metrics, "done %s" % j.name):
                            j.done(event["properties"]["new_attributes"])
                        self.cond.notify()
                    finally:
                        self.cond.release()

    def poll_states(self):
        """Poll status of jobs or containers listed in the processes dict.

        Runs in a separate thread.
        """
        while True:
            self.stop_polling.wait(15)
            if self.stop_polling.is_set():
                break
            with self.lock:
                keys = self.processes.keys()
            if not keys:
                continue

            if self.work_api == "containers":
                table = self.poll_api.containers()
            elif self.work_api == "jobs":
                table = self.poll_api.jobs()

            try:
                proc_states = table.list(filters=[["uuid", "in", keys]]).execute(num_retries=self.num_retries)
            except Exception as e:
                logger.warn("Error checking states on API server: %s", e)
                continue

            for p in proc_states["items"]:
                # Feed each polled record through the same code path as a
                # websocket "update" event.
                self.on_message({
                    "object_uuid": p["uuid"],
                    "event_type": "update",
                    "properties": {
                        "new_attributes": p
                    }
                })

    def get_uploaded(self):
        return self.uploaded.copy()

    def add_uploaded(self, src, pair):
        self.uploaded[src] = pair

    def check_writable(self, obj):
        if isinstance(obj, dict):
            if obj.get("writable"):
                raise UnsupportedRequirement("InitialWorkDir feature 'writable: true' not supported")
            for v in obj.itervalues():
                self.check_writable(v)
        if isinstance(obj, list):
            for v in obj:
                self.check_writable(v)
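
    # The kind of CWL requirement check_writable() rejects looks like this
    # (illustrative tool fragment; field names per the CWL v1.0 spec):
    #
    #   requirements:
    #     - class: InitialWorkDirRequirement
    #       listing:
    #         - entry: $(inputs.inp)
    #           writable: true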

    def make_output_collection(self, name, tagsString, outputObj):
        outputObj = copy.deepcopy(outputObj)

        # Collect every File and Directory object in the output.
        files = []
        def capture(fileobj):
            files.append(fileobj)

        adjustDirObjs(outputObj, capture)
        adjustFileObjs(outputObj, capture)

        generatemapper = FinalOutputPathMapper(files, "", "", separateDirs=False)

        final = arvados.collection.Collection(api_client=self.api,
                                              keep_client=self.keep_client,
                                              num_retries=self.num_retries)

        srccollections = {}
        for k, v in generatemapper.items():
            if k.startswith("_:"):
                # Literal objects: directories are implied by their contents;
                # file literals are written out directly.
                if v.type == "Directory":
                    continue
                if v.type == "CreateFile":
                    with final.open(v.target, "wb") as f:
                        f.write(v.resolved.encode("utf-8"))
                    continue

            if not k.startswith("keep:"):
                raise Exception("Output source is not in keep or a literal")
            sp = k.split("/")
            srccollection = sp[0][5:]
            if srccollection not in srccollections:
                try:
                    srccollections[srccollection] = arvados.collection.CollectionReader(
                        srccollection,
                        api_client=self.api,
                        keep_client=self.keep_client,
                        num_retries=self.num_retries)
                except arvados.errors.ArgumentError as e:
                    logger.error("Creating CollectionReader for '%s' '%s': %s", k, v, e)
                    raise
            reader = srccollections[srccollection]
            try:
                srcpath = "/".join(sp[1:]) if len(sp) > 1 else "."
                final.copy(srcpath, v.target, source_collection=reader, overwrite=False)
            except IOError as e:
                logger.warn("While preparing output collection: %s", e)

        def rewrite(fileobj):
            fileobj["location"] = generatemapper.mapper(fileobj["location"]).target
            for k in ("basename", "listing", "contents"):
                if k in fileobj:
                    del fileobj[k]

        adjustDirObjs(outputObj, rewrite)
        adjustFileObjs(outputObj, rewrite)

        with final.open("cwl.output.json", "w") as f:
            json.dump(outputObj, f, sort_keys=True, indent=4, separators=(',', ': '))

        final.save_new(name=name, owner_uuid=self.project_uuid, ensure_unique_name=True)

        logger.info("Final output collection %s \"%s\" (%s)", final.portable_data_hash(),
                    final.api_response()["name"],
                    final.manifest_locator())

        final_uuid = final.manifest_locator()
        tags = tagsString.split(',')
        for tag in tags:
            self.api.links().create(body={
                "head_uuid": final_uuid, "link_class": "tag", "name": tag
                }).execute(num_retries=self.num_retries)

        def finalcollection(fileobj):
            fileobj["location"] = "keep:%s/%s" % (final.portable_data_hash(), fileobj["location"])

        adjustDirObjs(outputObj, finalcollection)
        adjustFileObjs(outputObj, finalcollection)

        return (outputObj, final)
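
    # For reference (illustrative locations): an output file recorded as
    # "keep:<step pdh>/foo.txt" is copied into `final` as "foo.txt" by the
    # loop above; finalcollection() then rewrites its location to
    # "keep:<final pdh>/foo.txt", so the reported output points into the
    # newly saved collection.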

    def set_crunch_output(self):
        if self.work_api == "containers":
            try:
                current = self.api.containers().current().execute(num_retries=self.num_retries)
                self.api.containers().update(uuid=current['uuid'],
                                             body={
                                                 'output': self.final_output_collection.portable_data_hash(),
                                             }).execute(num_retries=self.num_retries)
            except Exception as e:
                logger.info("Setting container output: %s", e)
        elif self.work_api == "jobs" and "TASK_UUID" in os.environ:
            self.api.job_tasks().update(uuid=os.environ["TASK_UUID"],
                                        body={
                                            'output': self.final_output_collection.portable_data_hash(),
                                            'success': self.final_status == "success",
                                        }).execute(num_retries=self.num_retries)

    def arv_executor(self, tool, job_order, **kwargs):
        self.debug = kwargs.get("debug")

        tool.visit(self.check_writable)

        useruuid = self.api.users().current().execute()["uuid"]
        self.project_uuid = kwargs.get("project_uuid") if kwargs.get("project_uuid") else useruuid

        make_fs_access = kwargs.get("make_fs_access") or partial(CollectionFsAccess,
                                                                 api_client=self.api,
                                                                 keep_client=self.keep_client)
        self.fs_access = make_fs_access(kwargs["basedir"])

        if kwargs.get("create_template"):
            tmpl = RunnerTemplate(self, tool, job_order, kwargs.get("enable_reuse"))
            tmpl.save()
            # cwltool.main will write our return value to stdout.
            return tmpl.uuid

        if kwargs.get("create_workflow") or kwargs.get("update_workflow"):
            return upload_workflow(self, tool, job_order, self.project_uuid, kwargs.get("update_workflow"))

        self.ignore_docker_for_reuse = kwargs.get("ignore_docker_for_reuse")

        kwargs["make_fs_access"] = make_fs_access
        kwargs["enable_reuse"] = kwargs.get("enable_reuse")
        kwargs["use_container"] = True
        kwargs["tmpdir_prefix"] = "tmp"
        kwargs["on_error"] = "continue"
        kwargs["compute_checksum"] = kwargs.get("compute_checksum")

        if self.work_api == "containers":
            kwargs["outdir"] = "/var/spool/cwl"
            kwargs["docker_outdir"] = "/var/spool/cwl"
            kwargs["tmpdir"] = "/tmp"
            kwargs["docker_tmpdir"] = "/tmp"
        elif self.work_api == "jobs":
            kwargs["outdir"] = "$(task.outdir)"
            kwargs["docker_outdir"] = "$(task.outdir)"
            kwargs["tmpdir"] = "$(task.tmpdir)"

        upload_instance(self, shortname(tool.tool["id"]), tool, job_order)

        runnerjob = None
        if kwargs.get("submit"):
            if self.work_api == "containers":
                if tool.tool["class"] == "CommandLineTool":
                    runnerjob = tool.job(job_order,
                                         self.output_callback,
                                         **kwargs).next()
                else:
                    runnerjob = RunnerContainer(self, tool, job_order, kwargs.get("enable_reuse"), self.output_name, self.output_tags)
            else:
                runnerjob = RunnerJob(self, tool, job_order, kwargs.get("enable_reuse"), self.output_name, self.output_tags)

        if not kwargs.get("submit") and "cwl_runner_job" not in kwargs and self.work_api != "containers":
            # Create pipeline for local run
            self.pipeline = self.api.pipeline_instances().create(
                body={
                    "owner_uuid": self.project_uuid,
                    "name": shortname(tool.tool["id"]),
                    "components": {},
                    "state": "RunningOnClient"}).execute(num_retries=self.num_retries)
            logger.info("Pipeline instance %s", self.pipeline["uuid"])

        if runnerjob and not kwargs.get("wait"):
            runnerjob.run(wait=kwargs.get("wait"))
            return runnerjob.uuid

        self.poll_api = arvados.api('v1')
        self.polling_thread = threading.Thread(target=self.poll_states)
        self.polling_thread.start()

        if runnerjob:
            jobiter = iter((runnerjob,))
        else:
            if "cwl_runner_job" in kwargs:
                self.uuid = kwargs.get("cwl_runner_job").get('uuid')
            jobiter = tool.job(job_order,
                               self.output_callback,
                               **kwargs)

        try:
            self.cond.acquire()
            # Will continue to hold the lock for the duration of this code
            # except when in cond.wait(), at which point on_message can update
            # job state and process output callbacks.

            loopperf = Perf(metrics, "jobiter")
            loopperf.__enter__()
            for runnable in jobiter:
                loopperf.__exit__()
                if runnable:
                    with Perf(metrics, "run"):
                        runnable.run(**kwargs)
                else:
                    if self.processes:
                        self.cond.wait(1)
                    else:
                        logger.error("Workflow is deadlocked, no runnable jobs and not waiting on any pending jobs.")
                        break
                loopperf.__enter__()
            loopperf.__exit__()

            while self.processes:
                self.cond.wait(1)

        except UnsupportedRequirement:
            raise
        except:
            if sys.exc_info()[0] is KeyboardInterrupt:
                logger.error("Interrupted, marking pipeline as failed")
            else:
                logger.error("Caught unhandled exception, marking pipeline as failed. Error was: %s", sys.exc_info()[1], exc_info=(sys.exc_info()[1] if self.debug else False))
            if self.pipeline:
                self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
                                                     body={"state": "Failed"}).execute(num_retries=self.num_retries)
            if runnerjob and runnerjob.uuid and self.work_api == "containers":
                # Cancel the runner container request by zeroing its priority.
                self.api.container_requests().update(uuid=runnerjob.uuid,
                                                     body={"priority": "0"}).execute(num_retries=self.num_retries)
        finally:
            self.cond.release()
            self.stop_polling.set()
            self.polling_thread.join()

        if self.final_status == "UnsupportedRequirement":
            raise UnsupportedRequirement("Check log for details.")

        if self.final_output is None:
            raise WorkflowException("Workflow did not return a result.")

        if kwargs.get("submit") and isinstance(runnerjob, Runner):
            logger.info("Final output collection %s", runnerjob.final_output)
        else:
            if self.output_name is None:
                self.output_name = "Output of %s" % (shortname(tool.tool["id"]))
            if self.output_tags is None:
                self.output_tags = ""
            self.final_output, self.final_output_collection = self.make_output_collection(self.output_name, self.output_tags, self.final_output)
            self.set_crunch_output()

        if self.final_status != "success":
            raise WorkflowException("Workflow failed.")

        if kwargs.get("compute_checksum"):
            adjustDirObjs(self.final_output, partial(getListing, self.fs_access))
            adjustFileObjs(self.final_output, partial(compute_checksums, self.fs_access))

        return self.final_output
437 """Print version string of key packages for provenance and debugging."""
439 arvcwlpkg = pkg_resources.require("arvados-cwl-runner")
440 arvpkg = pkg_resources.require("arvados-python-client")
441 cwlpkg = pkg_resources.require("cwltool")
443 return "%s %s %s, %s %s, %s %s" % (sys.argv[0], __version__, arvcwlpkg[0].version,
444 "arvados-python-client", arvpkg[0].version,
445 "cwltool", cwlpkg[0].version)


def arg_parser():  # type: () -> argparse.ArgumentParser
    parser = argparse.ArgumentParser(description='Arvados executor for Common Workflow Language')

    parser.add_argument("--basedir", type=str,
                        help="Base directory used to resolve relative references in the input, default to directory of input object file or current directory (if inputs piped/provided on command line).")
    parser.add_argument("--outdir", type=str, default=os.path.abspath('.'),
                        help="Output directory, default current directory")

    parser.add_argument("--eval-timeout",
                        help="Time to wait for a Javascript expression to evaluate before giving an error, default 20s.",
                        type=float,
                        default=20)
    parser.add_argument("--version", action="store_true", help="Print version and exit")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--verbose", action="store_true", help="Default logging")
    exgroup.add_argument("--quiet", action="store_true", help="Only print warnings and errors.")
    exgroup.add_argument("--debug", action="store_true", help="Print even more logging")

    parser.add_argument("--metrics", action="store_true", help="Print timing metrics")

    parser.add_argument("--tool-help", action="store_true", help="Print command line help for tool")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--enable-reuse", action="store_true",
                         default=True, dest="enable_reuse",
                         help="")
    exgroup.add_argument("--disable-reuse", action="store_false",
                         default=True, dest="enable_reuse",
                         help="")

    parser.add_argument("--project-uuid", type=str, metavar="UUID",
                        help="Project that will own the workflow jobs, if not provided, will go to home project.")
    parser.add_argument("--output-name", type=str,
                        help="Name to use for collection that stores the final output.", default=None)
    parser.add_argument("--output-tags", type=str,
                        help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
    parser.add_argument("--ignore-docker-for-reuse", action="store_true",
                        help="Ignore Docker image version when deciding whether to reuse past jobs.",
                        default=False)

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--submit", action="store_true", help="Submit workflow to run on Arvados.",
                         default=True, dest="submit")
    exgroup.add_argument("--local", action="store_false", help="Run workflow on local host (submits jobs to Arvados).",
                         default=True, dest="submit")
    exgroup.add_argument("--create-template", action="store_true", help="Create an Arvados pipeline template.")
    exgroup.add_argument("--create-workflow", action="store_true", help="Create an Arvados workflow.")
    exgroup.add_argument("--update-workflow", type=str, metavar="UUID", help="Update existing Arvados workflow with uuid.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--wait", action="store_true", help="After submitting workflow runner job, wait for completion.",
                         default=True, dest="wait")
    exgroup.add_argument("--no-wait", action="store_false", help="Submit workflow runner job and exit.",
                         default=True, dest="wait")

    parser.add_argument("--api", type=str,
                        default=None, dest="work_api",
                        help="Select work submission API, one of 'jobs' or 'containers'. Default is 'jobs' if that API is available, otherwise 'containers'.")

    parser.add_argument("--compute-checksum", action="store_true", default=False,
                        help="Compute checksum of contents while collecting outputs",
                        dest="compute_checksum")

    parser.add_argument("workflow", type=str, nargs="?", default=None, help="The workflow to execute")
    parser.add_argument("job_order", nargs=argparse.REMAINDER, help="The input object to the workflow.")

    return parser


def add_arv_hints():
    # Load the Arvados extension schema and register its names alongside the
    # core CWL v1.0 names, so documents may use http://arvados.org/cwl# hints.
    cache = {}
    res = pkg_resources.resource_stream(__name__, 'arv-cwl-schema.yml')
    cache["http://arvados.org/cwl"] = res.read()
    res.close()
    document_loader, cwlnames, _, _ = cwltool.process.get_schema("v1.0")
    _, extnames, _, _ = schema_salad.schema.load_schema("http://arvados.org/cwl", cache=cache)
    for n in extnames.names:
        if not cwlnames.has_name("http://arvados.org/cwl#"+n, ""):
            cwlnames.add_name("http://arvados.org/cwl#"+n, "", extnames.get_name(n, ""))
        document_loader.idx["http://arvados.org/cwl#"+n] = {}


def main(args, stdout, stderr, api_client=None, keep_client=None):
    parser = arg_parser()

    job_order_object = None
    arvargs = parser.parse_args(args)
    if (arvargs.create_template or arvargs.create_workflow or arvargs.update_workflow) and not arvargs.job_order:
        job_order_object = ({}, "")

    add_arv_hints()

    try:
        if api_client is None:
            api_client = arvados.api('v1', model=OrderedJsonModel())
        runner = ArvCwlRunner(api_client, work_api=arvargs.work_api, keep_client=keep_client,
                              output_name=arvargs.output_name, output_tags=arvargs.output_tags)
    except Exception as e:
        logger.error(e)
        return 1

    if arvargs.debug:
        logger.setLevel(logging.DEBUG)

    if arvargs.quiet:
        logger.setLevel(logging.WARN)
        logging.getLogger('arvados.arv-run').setLevel(logging.WARN)

    if arvargs.metrics:
        metrics.setLevel(logging.DEBUG)
        logging.getLogger("cwltool.metrics").setLevel(logging.DEBUG)

    arvargs.conformance_test = None
    arvargs.use_container = True

    return cwltool.main.main(args=arvargs,
                             stdout=stdout,
                             stderr=stderr,
                             executor=runner.arv_executor,
                             makeTool=runner.arv_make_tool,
                             versionfunc=versionstring,
                             job_order_object=job_order_object,
                             make_fs_access=partial(CollectionFsAccess, api_client=api_client))
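
# The installed `arvados-cwl-runner` script is expected to invoke main()
# along these lines (sketch):
#
#   import sys
#   sys.exit(main(sys.argv[1:], sys.stdout, sys.stderr))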