# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

from past.builtins import basestring
from future.utils import viewitems

import os
import json
import copy
import logging
import urllib.parse

from io import StringIO

from typing import (MutableSequence, MutableMapping)

from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap, CommentedSeq

from schema_salad.sourceline import SourceLine, cmap
import schema_salad.ref_resolver
import schema_salad.utils

import arvados.collection

from cwltool.pack import pack
from cwltool.load_tool import fetch_document, resolve_and_validate_document
from cwltool.process import shortname
from cwltool.workflow import Workflow, WorkflowException, WorkflowStep
from cwltool.utils import adjustFileObjs, adjustDirObjs, visit_class, normalizeFilesDirs
from cwltool.context import LoadingContext

from schema_salad.ref_resolver import file_uri, uri_file_path

import ruamel.yaml as yaml

from .runner import (upload_dependencies, packed_workflow, upload_workflow_collection,
                     trim_anonymous_location, remove_redundant_fields, discover_secondary_files,
                     make_builder, arvados_jobs_image)
from .pathmapper import ArvPathMapper, trim_listing
from .arvtool import ArvadosCommandTool, set_cluster_target
from ._version import __version__

from .perf import Perf

logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')

max_res_pars = ("coresMin", "coresMax", "ramMin", "ramMax", "tmpdirMin", "tmpdirMax")
sum_res_pars = ("outdirMin", "outdirMax")

def make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool):
    col = arvados.collection.Collection(api_client=arvRunner.api,
                                        keep_client=arvRunner.keep_client)

    with col.open("workflow.json", "wt") as f:
        json.dump(packed, f, sort_keys=True, indent=4, separators=(',', ': '))

    pdh = col.portable_data_hash()

    toolname = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
    if git_info and git_info.get("http://arvados.org/cwl#gitDescribe"):
        toolname = "%s (%s)" % (toolname, git_info.get("http://arvados.org/cwl#gitDescribe"))

    existing = arvRunner.api.collections().list(filters=[["portable_data_hash", "=", pdh], ["owner_uuid", "=", project_uuid]]).execute(num_retries=arvRunner.num_retries)
    if len(existing["items"]) == 0:
        col.save_new(name=toolname, owner_uuid=project_uuid, ensure_unique_name=True)

    # now construct the wrapper

    step = {
        "id": "#main/" + toolname,
        "in": [],
        "out": [],
        "run": "keep:%s/workflow.json#main" % pdh,
        "label": name
    }

    newinputs = []
    for i in main["inputs"]:
        inp = {}
        # Make sure to only copy known fields that are meaningful at
        # the workflow level. In practice this ensures that if we're
        # wrapping a CommandLineTool we don't grab inputBinding.
        # Right now also excludes extension fields, which is fine,
        # Arvados doesn't currently look for any extension fields on
        # input parameters.
        for f in ("type", "label", "secondaryFiles", "streamable",
                  "doc", "id", "format", "loadContents",
                  "loadListing", "default"):
            if f in i:
                inp[f] = i[f]
        newinputs.append(inp)

    wrapper = {
        "class": "Workflow",
        "id": "#main",
        "inputs": newinputs,
        "outputs": [],
        "steps": [step]
    }

    for i in main["inputs"]:
        step["in"].append({
            "id": "#main/step/%s" % shortname(i["id"]),
            "source": "#main/%s" % shortname(i["id"])
        })

    for i in main["outputs"]:
        step["out"].append({"id": "#main/step/%s" % shortname(i["id"])})
        wrapper["outputs"].append({"outputSource": "#main/step/%s" % shortname(i["id"]),
                                   "type": i["type"],
                                   "id": "#main/%s" % shortname(i["id"])})

    wrapper["requirements"] = [{"class": "SubworkflowFeatureRequirement"}]

    if main.get("requirements"):
        wrapper["requirements"].extend(main["requirements"])
    if main.get("hints"):
        wrapper["hints"] = main["hints"]

    doc = {"cwlVersion": "v1.2", "$graph": [wrapper]}

    return json.dumps(doc, sort_keys=True, indent=4, separators=(',', ': '))

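# Illustrative sketch (not part of the original source): the document
# returned by make_wrapper_workflow() has roughly this shape --
#
#   {
#     "cwlVersion": "v1.2",
#     "$graph": [{
#       "class": "Workflow",
#       "id": "#main",
#       "inputs": [...],                 # copied from the packed #main
#       "outputs": [...],                # wired to the single step's outputs
#       "requirements": [{"class": "SubworkflowFeatureRequirement"}, ...],
#       "steps": [{"id": "#main/<toolname>",
#                  "run": "keep:<pdh>/workflow.json#main", ...}]
#     }]
#   }
#
# i.e. a one-step wrapper whose single step runs the packed workflow
# stored in Keep, addressed by portable data hash.
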
def rel_ref(s, baseuri, urlexpander, merged_map):
    uri = urlexpander(s, baseuri)
    fileuri = urllib.parse.urldefrag(baseuri)[0]
    if fileuri in merged_map:
        replacements = merged_map[fileuri].resolved
        if uri in replacements:
            return replacements[uri]

    if s.startswith("keep:"):
        return s

    p1 = os.path.dirname(uri_file_path(baseuri))
    p2 = os.path.dirname(uri_file_path(uri))
    p3 = os.path.basename(uri_file_path(uri))
    r = os.path.relpath(p2, p1)
    if r == ".":
        r = ""

    return os.path.join(r, p3)

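# Example (illustrative, not from the original source): with
# baseuri = "file:///work/wf/main.cwl" and a reference that expands to
# "file:///work/tools/a.cwl", rel_ref returns "../tools/a.cwl".
# References into Keep ("keep:...") are returned unchanged, and
# references recorded in merged_map are replaced with their resolved form.
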
def update_refs(d, baseuri, urlexpander, merged_map, set_block_style, runtimeContext):
    if set_block_style and (isinstance(d, CommentedSeq) or isinstance(d, CommentedMap)):
        d.fa.set_block_style()

    if isinstance(d, MutableSequence):
        for s in d:
            update_refs(s, baseuri, urlexpander, merged_map, set_block_style, runtimeContext)
    elif isinstance(d, MutableMapping):
        if "id" in d:
            baseuri = urlexpander(d["id"], baseuri, scoped_id=True)

        if d.get("class") == "DockerRequirement":
            dockerImageId = d.get("dockerImageId") or d.get("dockerPull")
            d["http://arvados.org/cwl#dockerCollectionPDH"] = runtimeContext.cached_docker_lookups.get(dockerImageId)

        for field in ("$include", "$import", "location", "run"):
            if field in d and isinstance(d[field], str):
                d[field] = rel_ref(d[field], baseuri, urlexpander, merged_map)

        if "$schemas" in d:
            for n, s in enumerate(d["$schemas"]):
                d["$schemas"][n] = rel_ref(d["$schemas"][n], baseuri, urlexpander, merged_map)

        for s in d:
            update_refs(d[s], baseuri, urlexpander, merged_map, set_block_style, runtimeContext)

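# For example (illustrative): while rewriting file:///work/wf/main.cwl,
# a step like {"run": "file:///work/tools/a.cwl"} is rewritten in place
# to {"run": "../tools/a.cwl"}, and any DockerRequirement picks up a
# http://arvados.org/cwl#dockerCollectionPDH annotation from the
# runtime context's cached Docker image lookups.
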
def new_upload_workflow(arvRunner, tool, job_order, project_uuid,
                        runtimeContext, uuid=None,
                        submit_runner_ram=0, name=None, merged_map=None,
                        submit_runner_image=None,
                        git_info=None, set_defaults=False):

    firstfile = None
    workflow_files = set()
    import_files = set()
    include_files = set()

    for w in tool.doc_loader.idx:
        if w.startswith("file://"):
            workflow_files.add(urllib.parse.urldefrag(w)[0])
            if firstfile is None:
                firstfile = urllib.parse.urldefrag(w)[0]
        if w.startswith("import:file://"):
            import_files.add(urllib.parse.urldefrag(w[7:])[0])
        if w.startswith("include:file://"):
            include_files.add(urllib.parse.urldefrag(w[8:])[0])

    all_files = workflow_files | import_files | include_files

    # Find the longest common directory prefix shared by all the
    # workflow files; paths stored in the collection are relative to it.
    n = 7
    allmatch = True
    while allmatch:
        n += 1
        for f in all_files:
            if len(f)-1 < n:
                n -= 1
                allmatch = False
                break
            if f[n] != firstfile[n]:
                allmatch = False
                break

    while firstfile[n] != "/":
        n -= 1

    prefix = firstfile[:n+1]

    col = arvados.collection.Collection()

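    # Worked example (illustrative, not from the original source): for
    #   file:///home/user/wf/main.cwl
    #   file:///home/user/tools/a.cwl
    # the common prefix is "file:///home/user/", so the files are stored
    # in the collection as "wf/main.cwl" and "tools/a.cwl" (i.e. w[n+1:]).
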
    for w in workflow_files | import_files:
        # 1. load the YAML text

        text = tool.doc_loader.fetch_text(w)
        if isinstance(text, bytes):
            textIO = StringIO(text.decode('utf-8'))
        else:
            textIO = StringIO(text)

        yamlloader = schema_salad.utils.yaml_no_ts()
        result = yamlloader.load(textIO)

        set_block_style = False
        if result.fa.flow_style():
            set_block_style = True

        # 2. find $import, $include, $schema, run, location
        # 3. update field value
        update_refs(result, w, tool.doc_loader.expand_url, merged_map, set_block_style, runtimeContext)

        with col.open(w[n+1:], "wt") as f:
            yamlloader.dump(result, stream=f)

    for w in include_files:
        with col.open(w[n+1:], "wb") as f1:
            with open(uri_file_path(w), "rb") as f2:
                dat = f2.read(65536)
                while dat:
                    f1.write(dat)
                    dat = f2.read(65536)

    toolname = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
    if git_info and git_info.get("http://arvados.org/cwl#gitDescribe"):
        toolname = "%s (%s)" % (toolname, git_info.get("http://arvados.org/cwl#gitDescribe"))

    toolfile = tool.tool["id"][n+1:]

    properties = {"type": "workflow",
                  "arv:workflowMain": toolfile,
                  }

    col.save_new(name=toolname, owner_uuid=arvRunner.project_uuid, ensure_unique_name=True, properties=properties)

    adjustDirObjs(job_order, trim_listing)
    adjustFileObjs(job_order, trim_anonymous_location)
    adjustDirObjs(job_order, trim_anonymous_location)

    # now construct the wrapper

    step = {
        "id": "#main/" + toolname,
        "in": [],
        "out": [],
        "run": "keep:%s/%s" % (col.portable_data_hash(), toolfile),
        "label": name
    }

    main = tool.tool

    wf_runner_resources = None

    hints = main.get("hints", [])
    found = False
    for h in hints:
        if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
            wf_runner_resources = h
            found = True
            break
    if not found:
        wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
        hints.append(wf_runner_resources)

    # wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner,
    #                                                               submit_runner_image or "arvados/jobs:"+__version__,
    #                                                               runtimeContext)

    if submit_runner_ram:
        wf_runner_resources["ramMin"] = submit_runner_ram

    newinputs = []
    for i in main["inputs"]:
        inp = {}
        # Make sure to only copy known fields that are meaningful at
        # the workflow level. In practice this ensures that if we're
        # wrapping a CommandLineTool we don't grab inputBinding.
        # Right now also excludes extension fields, which is fine,
        # Arvados doesn't currently look for any extension fields on
        # input parameters.
        for f in ("type", "label", "secondaryFiles", "streamable",
                  "doc", "format", "loadContents",
                  "loadListing", "default"):
            if f in i:
                inp[f] = i[f]

        if set_defaults:
            sn = shortname(i["id"])
            if sn in job_order:
                inp["default"] = job_order[sn]

        inp["id"] = "#main/%s" % shortname(i["id"])
        newinputs.append(inp)

    wrapper = {
        "class": "Workflow",
        "id": "#main",
        "inputs": newinputs,
        "outputs": [],
        "steps": [step]
    }

    for i in main["inputs"]:
        step["in"].append({
            "id": "#main/step/%s" % shortname(i["id"]),
            "source": "#main/%s" % shortname(i["id"])
        })

    for i in main["outputs"]:
        step["out"].append({"id": "#main/step/%s" % shortname(i["id"])})
        wrapper["outputs"].append({"outputSource": "#main/step/%s" % shortname(i["id"]),
                                   "type": i["type"],
                                   "id": "#main/%s" % shortname(i["id"])})

    wrapper["requirements"] = [{"class": "SubworkflowFeatureRequirement"}]

    if main.get("requirements"):
        wrapper["requirements"].extend(main["requirements"])
    if main.get("hints"):
        wrapper["hints"] = main["hints"]

    doc = {"cwlVersion": "v1.2", "$graph": [wrapper]}

    update_refs(wrapper, main["id"], tool.doc_loader.expand_url, merged_map, False, runtimeContext)

    return doc

def make_workflow_record(arvRunner, doc, name, tool, project_uuid, update_uuid):

    wrappertext = json.dumps(doc, sort_keys=True, indent=4, separators=(',', ': '))

    body = {
        "workflow": {
            "name": name,
            "description": tool.tool.get("doc", ""),
            "definition": wrappertext
        }}
    if project_uuid:
        body["workflow"]["owner_uuid"] = project_uuid

    if update_uuid:
        call = arvRunner.api.workflows().update(uuid=update_uuid, body=body)
    else:
        call = arvRunner.api.workflows().create(body=body)
    return call.execute(num_retries=arvRunner.num_retries)["uuid"]

def upload_workflow(arvRunner, tool, job_order, project_uuid,
                    runtimeContext, uuid=None,
                    submit_runner_ram=0, name=None, merged_map=None,
                    submit_runner_image=None,
                    git_info=None):

    packed = packed_workflow(arvRunner, tool, merged_map, runtimeContext, git_info)

    adjustDirObjs(job_order, trim_listing)
    adjustFileObjs(job_order, trim_anonymous_location)
    adjustDirObjs(job_order, trim_anonymous_location)

    main = [p for p in packed["$graph"] if p["id"] == "#main"][0]
    for inp in main["inputs"]:
        sn = shortname(inp["id"])
        if sn in job_order:
            inp["default"] = job_order[sn]

    if not name:
        name = tool.tool.get("label", os.path.basename(tool.tool["id"]))

    upload_dependencies(arvRunner, name, tool.doc_loader,
                        packed, tool.tool["id"],
                        runtimeContext)

    wf_runner_resources = None

    hints = main.get("hints", [])
    found = False
    for h in hints:
        if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
            wf_runner_resources = h
            found = True
            break
    if not found:
        wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
        hints.append(wf_runner_resources)

    wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner,
                                                                  submit_runner_image or "arvados/jobs:"+__version__,
                                                                  runtimeContext)

    if submit_runner_ram:
        wf_runner_resources["ramMin"] = submit_runner_ram

    main["hints"] = hints

    wrapper = make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool)

    body = {
        "workflow": {
            "name": name,
            "description": tool.tool.get("doc", ""),
            "definition": wrapper
        }}
    if project_uuid:
        body["workflow"]["owner_uuid"] = project_uuid

    if uuid:
        call = arvRunner.api.workflows().update(uuid=uuid, body=body)
    else:
        call = arvRunner.api.workflows().create(body=body)
    return call.execute(num_retries=arvRunner.num_retries)["uuid"]

def dedup_reqs(reqs):
    dedup = {}
    for r in reversed(reqs):
        if r["class"] not in dedup and not r["class"].startswith("http://arvados.org/cwl#"):
            dedup[r["class"]] = r
    return [dedup[r] for r in sorted(dedup.keys())]

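# Example (illustrative): because the list is scanned in reverse and the
# first occurrence seen wins, the *last* entry for a given class is kept:
#
#   dedup_reqs([{"class": "ResourceRequirement", "ramMin": 512},
#               {"class": "ResourceRequirement", "ramMin": 1024}])
#   # -> [{"class": "ResourceRequirement", "ramMin": 1024}]
#
# Arvados extension requirements (http://arvados.org/cwl#...) are dropped.
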
def get_overall_res_req(res_reqs):
    """Take the overall of a list of ResourceRequirement,
    i.e., the max of coresMin, coresMax, ramMin, ramMax, tmpdirMin, tmpdirMax
    and the sum of outdirMin, outdirMax."""

    all_res_req = {}
    exception_msgs = []
    for a in max_res_pars + sum_res_pars:
        all_res_req[a] = []
        for res_req in res_reqs:
            if a in res_req:
                if isinstance(res_req[a], int): # integer check
                    all_res_req[a].append(res_req[a])
                else:
                    msg = SourceLine(res_req, a).makeError(
                        "Non-top-level ResourceRequirement in single container cannot have expressions")
                    exception_msgs.append(msg)
    if exception_msgs:
        raise WorkflowException("\n".join(exception_msgs))
    else:
        overall_res_req = {}
        for a in all_res_req:
            if all_res_req[a]:
                if a in max_res_pars:
                    overall_res_req[a] = max(all_res_req[a])
                elif a in sum_res_pars:
                    overall_res_req[a] = sum(all_res_req[a])
        if overall_res_req:
            overall_res_req["class"] = "ResourceRequirement"
        return cmap(overall_res_req)

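# Worked example (illustrative, not from the original source):
#
#   get_overall_res_req([{"coresMin": 2, "ramMin": 1024, "outdirMin": 512},
#                        {"coresMin": 4, "ramMin": 512,  "outdirMin": 512}])
#   # -> {"coresMin": 4, "ramMin": 1024, "outdirMin": 1024,
#   #     "class": "ResourceRequirement"}
#
# cores/ram/tmpdir take the max across steps, since the steps share one
# container, while output directory sizes are summed.
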
class ArvadosWorkflowStep(WorkflowStep):
    def __init__(self,
                 toolpath_object,      # type: Dict[Text, Any]
                 pos,                  # type: int
                 loadingContext,       # type: LoadingContext
                 arvrunner,
                 *argc,
                 **argv
                ):  # type: (...) -> None

        if arvrunner.fast_submit:
            self.tool = toolpath_object
            self.tool["inputs"] = []
            self.tool["outputs"] = []
        else:
            super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
        self.tool["class"] = "WorkflowStep"
        self.arvrunner = arvrunner

    def job(self, joborder, output_callback, runtimeContext):
        runtimeContext = runtimeContext.copy()
        runtimeContext.toplevel = True  # Preserve behavior for #13365

        builder = make_builder({shortname(k): v for k, v in viewitems(joborder)}, self.hints, self.requirements,
                               runtimeContext, self.metadata)
        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
        return super(ArvadosWorkflowStep, self).job(joborder, output_callback, runtimeContext)

class ArvadosWorkflow(Workflow):
    """Wrap cwltool Workflow to override selected methods."""

    def __init__(self, arvrunner, toolpath_object, loadingContext):
        self.arvrunner = arvrunner
        self.wf_pdh = None
        self.dynamic_resource_req = []
        self.static_resource_req = []
        self.wf_reffiles = []
        self.loadingContext = loadingContext
        super(ArvadosWorkflow, self).__init__(toolpath_object, loadingContext)
        self.cluster_target_req, _ = self.get_requirement("http://arvados.org/cwl#ClusterTarget")

    def job(self, joborder, output_callback, runtimeContext):

        builder = make_builder(joborder, self.hints, self.requirements, runtimeContext, self.metadata)
        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)

        req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
        if not req:
            return super(ArvadosWorkflow, self).job(joborder, output_callback, runtimeContext)

        # RunInSingleContainer is true

        with SourceLine(self.tool, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
            if "id" not in self.tool:
                raise WorkflowException("%s object must have 'id'" % (self.tool["class"]))

        discover_secondary_files(self.arvrunner.fs_access, builder,
                                 self.tool["inputs"], joborder)
        normalizeFilesDirs(joborder)

        with Perf(metrics, "subworkflow upload_deps"):
            upload_dependencies(self.arvrunner,
                                os.path.basename(joborder.get("id", "#")),
                                self.doc_loader,
                                joborder,
                                joborder.get("id", "#"),
                                runtimeContext)

            if self.wf_pdh is None:
                packed = pack(self.loadingContext, self.tool["id"], loader=self.doc_loader)

                for p in packed["$graph"]:
                    if p["id"] == "#main":
                        p["requirements"] = dedup_reqs(self.requirements)
                        p["hints"] = dedup_reqs(self.hints)

                def visit(item):
                    if "requirements" in item:
                        item["requirements"] = [i for i in item["requirements"] if i["class"] != "DockerRequirement"]
                    for t in ("hints", "requirements"):
                        if t not in item:
                            continue
                        for req in item[t]:
                            if req["class"] == "ResourceRequirement":
                                dyn = False
                                for k in max_res_pars + sum_res_pars:
                                    if k in req:
                                        if isinstance(req[k], basestring):
                                            if item["id"] == "#main":
                                                # only the top-level requirements/hints may contain expressions
                                                self.dynamic_resource_req.append(req)
                                                dyn = True
                                                break
                                            else:
                                                with SourceLine(req, k, WorkflowException):
                                                    raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
                                if not dyn:
                                    self.static_resource_req.append(req)

                visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)

                if self.static_resource_req:
                    self.static_resource_req = [get_overall_res_req(self.static_resource_req)]

                upload_dependencies(self.arvrunner,
                                    runtimeContext.name,
                                    self.doc_loader,
                                    packed,
                                    self.tool["id"],
                                    runtimeContext)

                # Discover files/directories referenced by the
                # workflow (mainly "default" values)
                visit_class(packed, ("File", "Directory"), self.wf_reffiles.append)

        if self.dynamic_resource_req:
            # Evaluate dynamic resource requirements using current builder
            rs = copy.copy(self.static_resource_req)
            for dyn_rs in self.dynamic_resource_req:
                eval_req = {"class": "ResourceRequirement"}
                for a in max_res_pars + sum_res_pars:
                    if a in dyn_rs:
                        eval_req[a] = builder.do_eval(dyn_rs[a])
                rs.append(eval_req)
            job_res_reqs = [get_overall_res_req(rs)]
        else:
            job_res_reqs = self.static_resource_req

        with Perf(metrics, "subworkflow adjust"):
            joborder_resolved = copy.deepcopy(joborder)
            joborder_keepmount = copy.deepcopy(joborder)

            reffiles = []
            visit_class(joborder_keepmount, ("File", "Directory"), reffiles.append)

            mapper = ArvPathMapper(self.arvrunner, reffiles+self.wf_reffiles, runtimeContext.basedir,
                                   "/keep/%s",
                                   "/keep/%s/%s")

            # For containers API, we need to make sure any extra
            # referenced files (ie referenced by the workflow but
            # not in the inputs) are included in the mounts.
            if self.wf_reffiles:
                runtimeContext = runtimeContext.copy()
                runtimeContext.extra_reffiles = copy.deepcopy(self.wf_reffiles)

            def keepmount(obj):
                remove_redundant_fields(obj)
                with SourceLine(obj, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
                    if "location" not in obj:
                        raise WorkflowException("%s object is missing required 'location' field: %s" % (obj["class"], obj))
                with SourceLine(obj, "location", WorkflowException, logger.isEnabledFor(logging.DEBUG)):
                    if obj["location"].startswith("keep:"):
                        obj["location"] = mapper.mapper(obj["location"]).target
                        if "listing" in obj:
                            del obj["listing"]
                    elif obj["location"].startswith("_:"):
                        del obj["location"]
                    else:
                        raise WorkflowException("Location is not a keep reference or a literal: '%s'" % obj["location"])

            visit_class(joborder_keepmount, ("File", "Directory"), keepmount)

            def resolved(obj):
                if obj["location"].startswith("keep:"):
                    obj["location"] = mapper.mapper(obj["location"]).resolved

            visit_class(joborder_resolved, ("File", "Directory"), resolved)

            if self.wf_pdh is None:
                adjustFileObjs(packed, keepmount)
                adjustDirObjs(packed, keepmount)
                self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed, runtimeContext)

        self.loadingContext = self.loadingContext.copy()
        self.loadingContext.metadata = self.loadingContext.metadata.copy()
        self.loadingContext.metadata["http://commonwl.org/cwltool#original_cwlVersion"] = "v1.0"

        if len(job_res_reqs) == 1:
            # RAM request needs to be at least 128 MiB or the workflow
            # runner itself won't run reliably.
            if job_res_reqs[0].get("ramMin", 1024) < 128:
                job_res_reqs[0]["ramMin"] = 128

        arguments = ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl", "cwl.input.yml"]
        if runtimeContext.debug:
            arguments.insert(0, '--debug')

        wf_runner = cmap({
            "class": "CommandLineTool",
            "baseCommand": "cwltool",
            "inputs": self.tool["inputs"],
            "outputs": self.tool["outputs"],
            "stdout": "cwl.output.json",
            "requirements": self.requirements+job_res_reqs+[
                {"class": "InlineJavascriptRequirement"},
                {
                "class": "InitialWorkDirRequirement",
                "listing": [{
                        "entryname": "workflow.cwl",
                        "entry": '$({"class": "File", "location": "keep:%s/workflow.cwl"})' % self.wf_pdh
                    }, {
                        "entryname": "cwl.input.yml",
                        # Escape backslashes and CWL parameter references so the
                        # serialized job order is treated as literal text.
                        "entry": json.dumps(joborder_keepmount, indent=2, sort_keys=True, separators=(',',': ')).replace("\\", "\\\\").replace('$(', '\\$(').replace('${', '\\${')
                    }]
                }],
            "hints": self.hints,
            "arguments": arguments,
            "id": "#"
        })
        return ArvadosCommandTool(self.arvrunner, wf_runner, self.loadingContext).job(joborder_resolved, output_callback, runtimeContext)

    def make_workflow_step(self,
                           toolpath_object,      # type: Dict[Text, Any]
                           pos,                  # type: int
                           loadingContext,       # type: LoadingContext
                           *argc,
                           **argv
                          ):
        # (...) -> WorkflowStep
        return ArvadosWorkflowStep(toolpath_object, pos, loadingContext, self.arvrunner, *argc, **argv)