# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

import copy
import datetime
import logging
import re

from cwltool.process import get_feature, shortname, UnsupportedRequirement
from cwltool.errors import WorkflowException
from cwltool.command_line_tool import revmap_file, CommandLineTool
from cwltool.load_tool import fetch_document
from cwltool.builder import Builder
from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class

from schema_salad.sourceline import SourceLine

import ruamel.yaml as yaml

import arvados.collection
from arvados.errors import ApiError

from .arvdocker import arv_docker_get_image
from .runner import Runner, arvados_jobs_image, packed_workflow, upload_workflow_collection, trim_anonymous_location, remove_redundant_fields
from .pathmapper import VwdPathMapper, trim_listing
from .perf import Perf
from . import done
from ._version import __version__

logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')
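
# crunchrunner logs a line like
#   crunchrunner: $(task.outdir)=/tmp/crunch-job/outdir
# each time it resolves a task placeholder. This regex captures the
# placeholder name (tmpdir, outdir, or keep) and the path assigned to it,
# so done() below can recover those paths from a finished job's log.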
crunchrunner_re = re.compile(r"^.*crunchrunner: \$\(task\.(tmpdir|outdir|keep)\)=(.*)$")

crunchrunner_git_commit = 'a3f2cb186e437bfce0031b024b2157b73ed2717d'


class ArvadosJob(object):
    """Submit and manage a Crunch job for executing a CWL CommandLineTool."""

    def __init__(self, runner):
        self.arvrunner = runner

    def run(self, dry_run=False, pull_image=True, **kwargs):
        script_parameters = {
            "command": self.command_line
        }
        runtime_constraints = {}

        with Perf(metrics, "generatefiles %s" % self.name):
            if self.generatefiles["listing"]:
                vwd = arvados.collection.Collection(api_client=self.arvrunner.api,
                                                    keep_client=self.arvrunner.keep_client,
                                                    num_retries=self.arvrunner.num_retries)
                script_parameters["task.vwd"] = {}
                generatemapper = VwdPathMapper([self.generatefiles], "", "",
                                               separateDirs=False)
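
                # Stage the InitialWorkDir listing into a "virtual working
                # directory": literal (CreateFile) entries are written into
                # the new collection below, while existing files are passed
                # along by reference through task.vwd.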
                with Perf(metrics, "createfiles %s" % self.name):
                    for f, p in generatemapper.items():
                        if p.type == "CreateFile":
                            with vwd.open(p.target, "w") as n:
                                n.write(p.resolved.encode("utf-8"))

                with Perf(metrics, "generatefiles.save_new %s" % self.name):
                    info = self._get_intermediate_collection_info()
                    vwd.save_new(name=info["name"],
                                 ensure_unique_name=True,
                                 trash_at=info["trash_at"],
                                 properties=info["properties"])
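
                # With the collection saved, point each task.vwd entry at
                # its final location: plain files keep their resolved path,
                # while literals are addressed through the new collection's
                # portable data hash under the $(task.keep) mount.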
                for f, p in generatemapper.items():
                    if p.type == "File":
                        script_parameters["task.vwd"][p.target] = p.resolved
                    if p.type == "CreateFile":
                        script_parameters["task.vwd"][p.target] = "$(task.keep)/%s/%s" % (vwd.portable_data_hash(), p.target)

        script_parameters["task.env"] = {"TMPDIR": self.tmpdir, "HOME": self.outdir}
        if self.environment:
            script_parameters["task.env"].update(self.environment)

        if self.stdin:
            script_parameters["task.stdin"] = self.stdin

        if self.stdout:
            script_parameters["task.stdout"] = self.stdout

        if self.stderr:
            script_parameters["task.stderr"] = self.stderr

        if self.successCodes:
            script_parameters["task.successCodes"] = self.successCodes
        if self.temporaryFailCodes:
            script_parameters["task.temporaryFailCodes"] = self.temporaryFailCodes
        if self.permanentFailCodes:
            script_parameters["task.permanentFailCodes"] = self.permanentFailCodes

        with Perf(metrics, "arv_docker_get_image %s" % self.name):
            (docker_req, docker_is_req) = get_feature(self, "DockerRequirement")
            if docker_req and kwargs.get("use_container") is not False:
                if docker_req.get("dockerOutputDirectory"):
                    raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
                        "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
                runtime_constraints["docker_image"] = arv_docker_get_image(self.arvrunner.api, docker_req, pull_image, self.arvrunner.project_uuid)
            else:
                runtime_constraints["docker_image"] = "arvados/jobs"
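
        # Map the evaluated CWL ResourceRequirement values onto Crunch
        # node size constraints.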
        resources = self.builder.resources
        if resources is not None:
            runtime_constraints["min_cores_per_node"] = resources.get("cores", 1)
            runtime_constraints["min_ram_mb_per_node"] = resources.get("ram")
            runtime_constraints["min_scratch_mb_per_node"] = resources.get("tmpdirSize", 0) + resources.get("outdirSize", 0)
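
        # The arv:RuntimeConstraints hint can reserve extra node RAM for
        # the Keep cache, and can select whether the output directory is
        # staged on local scratch space or written directly to Keep.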
        runtime_req, _ = get_feature(self, "http://arvados.org/cwl#RuntimeConstraints")
        if runtime_req:
            if "keep_cache" in runtime_req:
                runtime_constraints["keep_cache_mb_per_task"] = runtime_req["keep_cache"]
                runtime_constraints["min_ram_mb_per_node"] += runtime_req["keep_cache"]
            if "outputDirType" in runtime_req:
                if runtime_req["outputDirType"] == "local_output_dir":
                    script_parameters["task.keepTmpOutput"] = False
                elif runtime_req["outputDirType"] == "keep_output_dir":
                    script_parameters["task.keepTmpOutput"] = True
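
        # Restrict job reuse to previous runs of the crunchrunner script at
        # (or descended from) the pinned commit and, unless the user chose
        # to ignore the Docker image for reuse, the same image.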
        filters = [["repository", "=", "arvados"],
                   ["script", "=", "crunchrunner"],
                   ["script_version", "in git", crunchrunner_git_commit]]
        if not self.arvrunner.ignore_docker_for_reuse:
            filters.append(["docker_image_locator", "in docker", runtime_constraints["docker_image"]])

        enable_reuse = kwargs.get("enable_reuse", True)

        reuse_req, _ = get_feature(self, "http://arvados.org/cwl#ReuseRequirement")
        if reuse_req:
            enable_reuse = reuse_req["enableReuse"]

        self.output_callback = self.arvrunner.get_wrapped_callback(self.output_callback)

        try:
            with Perf(metrics, "create %s" % self.name):
                response = self.arvrunner.api.jobs().create(
                    body={
                        "owner_uuid": self.arvrunner.project_uuid,
                        "script": "crunchrunner",
                        "repository": "arvados",
                        "script_version": "master",
                        "minimum_script_version": crunchrunner_git_commit,
                        "script_parameters": {"tasks": [script_parameters]},
                        "runtime_constraints": runtime_constraints
                    },
                    filters=filters,
                    find_or_create=enable_reuse
                ).execute(num_retries=self.arvrunner.num_retries)

            self.uuid = response["uuid"]
            self.arvrunner.process_submitted(self)

            self.update_pipeline_component(response)

            if response["state"] == "Complete":
                logger.info("%s reused job %s", self.arvrunner.label(self), response["uuid"])
                # Give read permission to the desired project on reused jobs
                if response["owner_uuid"] != self.arvrunner.project_uuid:
                    try:
                        self.arvrunner.api.links().create(body={
                            'link_class': 'permission',
                            'name': 'can_read',
                            'tail_uuid': self.arvrunner.project_uuid,
                            'head_uuid': response["uuid"],
                        }).execute(num_retries=self.arvrunner.num_retries)
                    except ApiError as e:
                        # The user might not have "manage" access on the job:
                        # log a message and continue.
                        logger.info("Creating read permission on job %s: %s",
                                    response["uuid"], e)
            else:
                logger.info("%s %s is %s", self.arvrunner.label(self), response["uuid"], response["state"])
        except Exception as e:
            logger.exception("%s error", self.arvrunner.label(self))
            self.output_callback({}, "permanentFail")

    def update_pipeline_component(self, record):
        with self.arvrunner.workflow_eval_lock:
            if self.arvrunner.pipeline:
                self.arvrunner.pipeline["components"][self.name] = {"job": record}
                with Perf(metrics, "update_pipeline_component %s" % self.name):
                    self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(
                        uuid=self.arvrunner.pipeline["uuid"],
                        body={
                            "components": self.arvrunner.pipeline["components"]
                        }).execute(num_retries=self.arvrunner.num_retries)
            if self.arvrunner.uuid:
                try:
                    job = self.arvrunner.api.jobs().get(uuid=self.arvrunner.uuid).execute()
                    if job:
                        components = job["components"]
                        components[self.name] = record["uuid"]
                        self.arvrunner.api.jobs().update(
                            uuid=self.arvrunner.uuid,
                            body={
                                "components": components
                            }).execute(num_retries=self.arvrunner.num_retries)
                except Exception as e:
                    logger.info("Error adding to components: %s", e)

    def done(self, record):
        try:
            self.update_pipeline_component(record)
        except Exception:
            pass

        try:
            if record["state"] == "Complete":
                processStatus = "success"
            else:
                processStatus = "permanentFail"

            outputs = {}
            try:
                if record["output"]:
                    with Perf(metrics, "inspect log %s" % self.name):
                        logc = arvados.collection.CollectionReader(record["log"],
                                                                   api_client=self.arvrunner.api,
                                                                   keep_client=self.arvrunner.keep_client,
                                                                   num_retries=self.arvrunner.num_retries)
                        log = logc.open(logc.keys()[0])
                        dirs = {
                            "tmpdir": "",
                            "outdir": "",
                            "keep": ""
                        }
                        for l in log:
                            # Determine the tmpdir, outdir and keep paths from
                            # the job run. Unfortunately, we can't take the
                            # first values we find (which are expected to be
                            # near the top) and stop scanning, because if the
                            # node fails and the job restarts on a different
                            # node these values will differ between runs, and
                            # we need the values from the final run that
                            # actually produced output.
                            g = crunchrunner_re.match(l)
                            if g:
                                dirs[g.group(1)] = g.group(2)

                    if processStatus == "permanentFail":
                        done.logtail(logc, logger, "%s error log:" % self.arvrunner.label(self))
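
                    # done.done() (from this package's done module) builds
                    # the CWL output object from the job's output collection,
                    # using the tmpdir/outdir/keep paths recovered from the
                    # log to map container paths back to Keep locations.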
                    with Perf(metrics, "output collection %s" % self.name):
                        outputs = done.done(self, record, dirs["tmpdir"],
                                            dirs["outdir"], dirs["keep"])
            except WorkflowException as e:
                logger.error("%s unable to collect output from %s:\n%s",
                             self.arvrunner.label(self), record["output"], e,
                             exc_info=(e if self.arvrunner.debug else False))
                processStatus = "permanentFail"
            except Exception as e:
                logger.exception("Got unknown exception while collecting output for job %s:", self.name)
                processStatus = "permanentFail"

            # Note: Currently, on error output_callback expects an empty
            # dict, anything else will fail.
            if not isinstance(outputs, dict):
                logger.error("Unexpected output type %s '%s'", type(outputs), outputs)
                outputs = {}
                processStatus = "permanentFail"
        finally:
            self.output_callback(outputs, processStatus)

    def _get_intermediate_collection_info(self):
        trash_time = None
        if self.arvrunner.intermediate_output_ttl > 0:
            trash_time = datetime.datetime.now() + datetime.timedelta(seconds=self.arvrunner.intermediate_output_ttl)

        current_container_uuid = None
        try:
            current_container = self.arvrunner.api.containers().current().execute(num_retries=self.arvrunner.num_retries)
            current_container_uuid = current_container['uuid']
        except ApiError as e:
            # Status code 404 just means we're not running in a container.
            if e.resp.status != 404:
                logger.info("Getting current container: %s", e)

        props = {"type": "Intermediate",
                 "container": current_container_uuid}

        return {"name": "Intermediate collection",
                "trash_at": trash_time,
                "properties": props}


class RunnerJob(Runner):
    """Submit and manage a Crunch job that runs crunch_scripts/cwl-runner."""

    def arvados_job_spec(self, dry_run=False, pull_image=True, **kwargs):
        """Create an Arvados job specification for this workflow.

        The returned dict can be used to create a job (i.e., passed as
        the +body+ argument to jobs().create()), or as a component in
        a pipeline template or pipeline instance.
        """
        if self.tool.tool["id"].startswith("keep:"):
            self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
        else:
            packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
            wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
            self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
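
        # Either way, job_order["cwl:tool"] now holds a Keep path (for a
        # freshly packed workflow, "<portable data hash>/workflow.cwl#main")
        # that crunch_scripts/cwl-runner can resolve through the Keep mount.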
        adjustDirObjs(self.job_order, trim_listing)
        visit_class(self.job_order, ("File", "Directory"), trim_anonymous_location)
        visit_class(self.job_order, ("File", "Directory"), remove_redundant_fields)

        if self.output_name:
            self.job_order["arv:output_name"] = self.output_name

        if self.output_tags:
            self.job_order["arv:output_tags"] = self.output_tags

        self.job_order["arv:enable_reuse"] = self.enable_reuse

        if self.on_error:
            self.job_order["arv:on_error"] = self.on_error

        if kwargs.get("debug"):
            self.job_order["arv:debug"] = True
331 "script": "cwl-runner",
332 "script_version": "master",
333 "minimum_script_version": "570509ab4d2ef93d870fd2b1f2eab178afb1bad9",
334 "repository": "arvados",
335 "script_parameters": self.job_order,
336 "runtime_constraints": {
337 "docker_image": arvados_jobs_image(self.arvrunner, self.jobs_image),
338 "min_ram_mb_per_node": self.submit_runner_ram

    def run(self, **kwargs):
        job_spec = self.arvados_job_spec(**kwargs)

        job_spec.setdefault("owner_uuid", self.arvrunner.project_uuid)

        job = self.arvrunner.api.jobs().create(
            body=job_spec,
            find_or_create=self.enable_reuse
        ).execute(num_retries=self.arvrunner.num_retries)
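
        # In a pipeline component, a bare hash in script_parameters can be
        # read as a parameter definition, so dict values (and the ambiguous
        # False/None) are boxed as {"value": v}, matching the parameter form
        # that RunnerTemplate builds below.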
        for k, v in job_spec["script_parameters"].items():
            if v is False or v is None or isinstance(v, dict):
                job_spec["script_parameters"][k] = {"value": v}

        del job_spec["owner_uuid"]
        job_spec["job"] = job
360 "owner_uuid": self.arvrunner.project_uuid,
363 "cwl-runner": job_spec,
365 "state": "RunningOnServer",
367 if not self.enable_reuse:
368 instance_spec["properties"] = {"run_options": {"enable_job_reuse": False}}

        self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().create(
            body=instance_spec).execute(num_retries=self.arvrunner.num_retries)
        logger.info("Created pipeline %s", self.arvrunner.pipeline["uuid"])

        if kwargs.get("wait") is False:
            self.uuid = self.arvrunner.pipeline["uuid"]
            return

        self.uuid = job["uuid"]
        self.arvrunner.process_submitted(self)


class RunnerTemplate(object):
    """An Arvados pipeline template that invokes a CWL workflow."""

    type_to_dataclass = {
        'boolean': 'boolean',
        'File': 'File',
        'Directory': 'Collection',
        'float': 'number',
        'int': 'number',
        'string': 'text',
    }

    def __init__(self, runner, tool, job_order, enable_reuse, uuid,
                 submit_runner_ram=0, name=None, merged_map=None):
        self.runner = runner
        self.tool = tool
        self.job = RunnerJob(
            runner=runner,
            tool=tool,
            job_order=job_order,
            enable_reuse=enable_reuse,
            output_name=None,
            output_tags=None,
            submit_runner_ram=submit_runner_ram,
            name=name,
            merged_map=merged_map)
        self.uuid = uuid

    def pipeline_component_spec(self):
        """Return a component that Workbench and a-r-p-i will understand.

        Specifically, translate CWL input specs to Arvados pipeline
        format, like {"dataclass":"File","value":"xyz"}.
        """
        spec = self.job.arvados_job_spec()

        # Most of the component spec is exactly the same as the job
        # spec (script, script_version, etc.).
        # spec['script_parameters'] isn't right, though. A component
        # spec's script_parameters hash is a translation of
        # self.tool.tool['inputs'] with defaults/overrides taken from
        # the job order. So we move the job parameters out of the way
        # and build a new spec['script_parameters'].
        job_params = spec['script_parameters']
        spec['script_parameters'] = {}

        for param in self.tool.tool['inputs']:
            param = copy.deepcopy(param)

            # Data type and "required" flag...
            types = param['type']
            if not isinstance(types, list):
                types = [types]
            param['required'] = 'null' not in types
            non_null_types = [t for t in types if t != "null"]
            if len(non_null_types) == 1:
                the_type = non_null_types[0]
                dataclass = None
                if isinstance(the_type, basestring):
                    dataclass = self.type_to_dataclass.get(the_type)
                if dataclass:
                    param['dataclass'] = dataclass
            # Note: If we didn't figure out a single appropriate
            # dataclass, we just leave that attribute out. We leave
            # the "type" attribute there in any case, which might help
            # downstream.

            # Title and description...
            title = param.pop('label', '')
            descr = param.pop('doc', '').rstrip('\n')
            if title:
                param['title'] = title
            if descr:
                param['description'] = descr

            # Fill in the value from the current job order, if any.
            param_id = shortname(param.pop('id'))
            value = job_params.get(param_id)
            if value is None:
                pass
            elif not isinstance(value, dict):
                param['value'] = value
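            # Keep references look like "keep:<pdh>/path"; dropping the
            # 5-character "keep:" prefix leaves the collection path form
            # that Workbench expects for File/Collection parameters.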
            elif param.get('dataclass') in ('File', 'Collection') and value.get('location'):
                param['value'] = value['location'][5:]

            spec['script_parameters'][param_id] = param

        spec['script_parameters']['cwl:tool'] = job_params['cwl:tool']
        return spec

    def save(self):
        body = {
            "components": {
                self.job.name: self.pipeline_component_spec(),
            },
            "name": self.job.name,
        }
        if self.runner.project_uuid:
            body["owner_uuid"] = self.runner.project_uuid
        if self.uuid:
            self.runner.api.pipeline_templates().update(
                uuid=self.uuid, body=body).execute(
                    num_retries=self.runner.num_retries)
            logger.info("Updated template %s", self.uuid)
        else:
            self.uuid = self.runner.api.pipeline_templates().create(
                body=body, ensure_unique_name=True).execute(
                    num_retries=self.runner.num_retries)['uuid']
            logger.info("Created template %s", self.uuid)