# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
from past.builtins import basestring
from future.utils import viewitems

import os
import json
import copy
import logging
from schema_salad.sourceline import SourceLine, cmap
import schema_salad.ref_resolver

from cwltool.pack import pack
from cwltool.load_tool import fetch_document, resolve_and_validate_document
from cwltool.process import shortname
from cwltool.workflow import Workflow, WorkflowException, WorkflowStep
from cwltool.utils import adjustFileObjs, adjustDirObjs, visit_class, normalizeFilesDirs
from cwltool.context import LoadingContext

import ruamel.yaml as yaml

from .runner import (upload_dependencies, packed_workflow, upload_workflow_collection,
                     trim_anonymous_location, remove_redundant_fields, discover_secondary_files,
                     make_builder, arvados_jobs_image)
from .pathmapper import ArvPathMapper, trim_listing
from .arvtool import ArvadosCommandTool, set_cluster_target
from ._version import __version__

from .perf import Perf

logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')
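
# CWL ResourceRequirement fields that get merged when a workflow is packed into
# a single container: max_res_pars are combined by taking the maximum,
# sum_res_pars by summing (see get_overall_res_req below).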
max_res_pars = ("coresMin", "coresMax", "ramMin", "ramMax", "tmpdirMin", "tmpdirMax")
sum_res_pars = ("outdirMin", "outdirMax")

def upload_workflow(arvRunner, tool, job_order, project_uuid,
                    runtimeContext, uuid=None,
                    submit_runner_ram=0, name=None, merged_map=None,
                    submit_runner_image=None,
                    git_info=None):

    packed = packed_workflow(arvRunner, tool, merged_map, runtimeContext, git_info)
    adjustDirObjs(job_order, trim_listing)
    adjustFileObjs(job_order, trim_anonymous_location)
    adjustDirObjs(job_order, trim_anonymous_location)

    main = [p for p in packed["$graph"] if p["id"] == "#main"][0]
    for inp in main["inputs"]:
        sn = shortname(inp["id"])
        if sn in job_order:
            inp["default"] = job_order[sn]
    if not name:
        name = tool.tool.get("label", os.path.basename(tool.tool["id"]))
    upload_dependencies(arvRunner, name, tool.doc_loader,
                        packed, tool.tool["id"], False,
                        runtimeContext)
    wf_runner_resources = None

    hints = main.get("hints", [])
    found = False
    for h in hints:
        if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
            wf_runner_resources = h
            found = True
            break
    if not found:
        wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
        hints.append(wf_runner_resources)
    wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner,
                                                                  submit_runner_image or "arvados/jobs:"+__version__,
                                                                  runtimeContext)

    if submit_runner_ram:
        wf_runner_resources["ramMin"] = submit_runner_ram

    main["hints"] = hints

    body = {
        "workflow": {
            "name": name,
            "description": tool.tool.get("doc", ""),
            "definition": json.dumps(packed, sort_keys=True, indent=4, separators=(',',': '))
        }}
    if project_uuid:
        body["workflow"]["owner_uuid"] = project_uuid
    if uuid:
        call = arvRunner.api.workflows().update(uuid=uuid, body=body)
    else:
        call = arvRunner.api.workflows().create(body=body)
    return call.execute(num_retries=arvRunner.num_retries)["uuid"]

def dedup_reqs(reqs):
    # Iterating in reverse means the last occurrence of a given requirement
    # class in reqs wins; Arvados extension requirements are left out.
    dedup = {}
    for r in reversed(reqs):
        if r["class"] not in dedup and not r["class"].startswith("http://arvados.org/cwl#"):
            dedup[r["class"]] = r
    return [dedup[r] for r in sorted(dedup.keys())]

def get_overall_res_req(res_reqs):
    """Take the overall of a list of ResourceRequirement,
    i.e., the max of coresMin, coresMax, ramMin, ramMax, tmpdirMin, tmpdirMax
    and the sum of outdirMin, outdirMax."""
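    # For example (illustrative values): merging
    #   {"class": "ResourceRequirement", "coresMin": 1, "ramMin": 1024, "outdirMin": 2048}
    #   {"class": "ResourceRequirement", "coresMin": 2, "ramMin": 512, "outdirMin": 1024}
    # yields {"coresMin": 2, "ramMin": 1024, "outdirMin": 3072, "class": "ResourceRequirement"}.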
    all_res_req = {}
    exception_msgs = []
    for a in max_res_pars + sum_res_pars:
        all_res_req[a] = []
        for res_req in res_reqs:
            if a in res_req:
                if isinstance(res_req[a], int): # integer check
                    all_res_req[a].append(res_req[a])
                else:
                    msg = SourceLine(res_req, a).makeError(
                        "Non-top-level ResourceRequirement in single container cannot have expressions")
                    exception_msgs.append(msg)
    if exception_msgs:
        raise WorkflowException("\n".join(exception_msgs))
    overall_res_req = {}
    for a in all_res_req:
        if all_res_req[a]:
            if a in max_res_pars:
                overall_res_req[a] = max(all_res_req[a])
            elif a in sum_res_pars:
                overall_res_req[a] = sum(all_res_req[a])
    if overall_res_req:
        overall_res_req["class"] = "ResourceRequirement"
    return cmap(overall_res_req)

class ArvadosWorkflowStep(WorkflowStep):
    def __init__(self,
                 toolpath_object,      # type: Dict[Text, Any]
                 pos,                  # type: int
                 loadingContext,       # type: LoadingContext
                 arvrunner,
                 *argc,
                 **argv
                ):  # type: (...) -> None

        if arvrunner.fast_submit:
            # In fast-submit mode, use the step document as-is instead of
            # running the superclass constructor.
            self.tool = toolpath_object
            self.tool["inputs"] = []
            self.tool["outputs"] = []
        else:
            super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
        self.tool["class"] = "WorkflowStep"
        self.arvrunner = arvrunner

    def job(self, joborder, output_callback, runtimeContext):
        runtimeContext = runtimeContext.copy()
        runtimeContext.toplevel = True  # Preserve behavior for #13365

        builder = make_builder({shortname(k): v for k,v in viewitems(joborder)}, self.hints, self.requirements,
                               runtimeContext, self.metadata)
        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
        return super(ArvadosWorkflowStep, self).job(joborder, output_callback, runtimeContext)

class ArvadosWorkflow(Workflow):
    """Wrap cwltool Workflow to override selected methods."""

    def __init__(self, arvrunner, toolpath_object, loadingContext):
        self.arvrunner = arvrunner
        self.wf_pdh = None
        self.dynamic_resource_req = []
        self.static_resource_req = []
        self.wf_reffiles = []
        self.loadingContext = loadingContext
        super(ArvadosWorkflow, self).__init__(toolpath_object, loadingContext)
        self.cluster_target_req, _ = self.get_requirement("http://arvados.org/cwl#ClusterTarget")

    def job(self, joborder, output_callback, runtimeContext):

        builder = make_builder(joborder, self.hints, self.requirements, runtimeContext, self.metadata)
        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)

        req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
        if not req:
            return super(ArvadosWorkflow, self).job(joborder, output_callback, runtimeContext)

        # RunInSingleContainer is true

        with SourceLine(self.tool, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
            if "id" not in self.tool:
                raise WorkflowException("%s object must have 'id'" % (self.tool["class"]))

        discover_secondary_files(self.arvrunner.fs_access, builder,
                                 self.tool["inputs"], joborder)
        normalizeFilesDirs(joborder)

        with Perf(metrics, "subworkflow upload_deps"):
            upload_dependencies(self.arvrunner,
                                os.path.basename(joborder.get("id", "#")),
                                self.doc_loader,
                                joborder,
                                joborder.get("id", "#"),
                                False,
                                runtimeContext)

            if self.wf_pdh is None:
                packed = pack(self.loadingContext, self.tool["id"], loader=self.doc_loader)

                for p in packed["$graph"]:
                    if p["id"] == "#main":
                        p["requirements"] = dedup_reqs(self.requirements)
                        p["hints"] = dedup_reqs(self.hints)

                def visit(item):
                    if "requirements" in item:
                        item["requirements"] = [i for i in item["requirements"] if i["class"] != "DockerRequirement"]
                    for t in ("hints", "requirements"):
                        if t not in item:
                            continue
                        for req in item[t]:
                            if req["class"] == "ResourceRequirement":
                                dyn = False
                                for k in max_res_pars + sum_res_pars:
                                    if k in req:
                                        if isinstance(req[k], basestring):
                                            if item["id"] == "#main":
                                                # only the top-level requirements/hints may contain expressions
                                                self.dynamic_resource_req.append(req)
                                                dyn = True
                                                break
                                            else:
                                                with SourceLine(req, k, WorkflowException):
                                                    raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
                                if not dyn:
                                    self.static_resource_req.append(req)

                visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)

                if self.static_resource_req:
                    self.static_resource_req = [get_overall_res_req(self.static_resource_req)]

                upload_dependencies(self.arvrunner, runtimeContext.name, self.doc_loader,
                                    packed, self.tool["id"], False, runtimeContext)

                # Discover files/directories referenced by the
                # workflow (mainly "default" values)
                visit_class(packed, ("File", "Directory"), self.wf_reffiles.append)

        if self.dynamic_resource_req:
            # Evaluate dynamic resource requirements using current builder
            rs = copy.copy(self.static_resource_req)
            for dyn_rs in self.dynamic_resource_req:
                eval_req = {"class": "ResourceRequirement"}
                for a in max_res_pars + sum_res_pars:
                    if a in dyn_rs:
                        eval_req[a] = builder.do_eval(dyn_rs[a])
                rs.append(eval_req)
            job_res_reqs = [get_overall_res_req(rs)]
        else:
            job_res_reqs = self.static_resource_req

        with Perf(metrics, "subworkflow adjust"):
            joborder_resolved = copy.deepcopy(joborder)
            joborder_keepmount = copy.deepcopy(joborder)

            reffiles = []
            visit_class(joborder_keepmount, ("File", "Directory"), reffiles.append)

            mapper = ArvPathMapper(self.arvrunner, reffiles+self.wf_reffiles, runtimeContext.basedir,
                                   "/keep/%s", "/keep/%s/%s")

            # For containers API, we need to make sure any extra
            # referenced files (ie referenced by the workflow but
            # not in the inputs) are included in the mounts.
            if self.wf_reffiles:
                runtimeContext = runtimeContext.copy()
                runtimeContext.extra_reffiles = copy.deepcopy(self.wf_reffiles)

            def keepmount(obj):
                remove_redundant_fields(obj)
                with SourceLine(obj, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
                    if "location" not in obj:
                        raise WorkflowException("%s object is missing required 'location' field: %s" % (obj["class"], obj))
                with SourceLine(obj, "location", WorkflowException, logger.isEnabledFor(logging.DEBUG)):
                    if obj["location"].startswith("keep:"):
                        obj["location"] = mapper.mapper(obj["location"]).target
                        if "listing" in obj:
                            del obj["listing"]
                    elif obj["location"].startswith("_:"):
                        del obj["location"]
                    else:
                        raise WorkflowException("Location is not a keep reference or a literal: '%s'" % obj["location"])

            visit_class(joborder_keepmount, ("File", "Directory"), keepmount)

            def resolved(obj):
                if obj["location"].startswith("keep:"):
                    obj["location"] = mapper.mapper(obj["location"]).resolved

            visit_class(joborder_resolved, ("File", "Directory"), resolved)

            if self.wf_pdh is None:
                adjustFileObjs(packed, keepmount)
                adjustDirObjs(packed, keepmount)
                self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed, runtimeContext)

        self.loadingContext = self.loadingContext.copy()
        self.loadingContext.metadata = self.loadingContext.metadata.copy()
        self.loadingContext.metadata["http://commonwl.org/cwltool#original_cwlVersion"] = "v1.0"

        if len(job_res_reqs) == 1:
            # RAM request needs to be at least 128 MiB or the workflow
            # runner itself won't run reliably.
            if job_res_reqs[0].get("ramMin", 1024) < 128:
                job_res_reqs[0]["ramMin"] = 128

        arguments = ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl", "cwl.input.yml"]
        if runtimeContext.debug:
            arguments.insert(0, '--debug')
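        # Sketch of the resulting invocation (illustrative): the CommandLineTool
        # assembled below runs roughly
        #   cwltool [--debug] --no-container --move-outputs \
        #       --preserve-entire-environment workflow.cwl cwl.input.yml
        # inside a single container.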

        wf_runner = cmap({
            "class": "CommandLineTool",
            "baseCommand": "cwltool",
            "inputs": self.tool["inputs"],
            "outputs": self.tool["outputs"],
            "stdout": "cwl.output.json",
            "requirements": self.requirements+job_res_reqs+[
                {"class": "InlineJavascriptRequirement"},
                {
                    "class": "InitialWorkDirRequirement",
                    "listing": [{
                        "entryname": "workflow.cwl",
                        "entry": '$({"class": "File", "location": "keep:%s/workflow.cwl"})' % self.wf_pdh
                    }, {
                        "entryname": "cwl.input.yml",
                        "entry": json.dumps(joborder_keepmount, indent=2, sort_keys=True, separators=(',',': ')).replace("\\", "\\\\").replace('$(', '\$(').replace('${', '\${')
                    }]
                }],
            "hints": self.hints,
            "arguments": arguments,
            "id": "#"
        })
        return ArvadosCommandTool(self.arvrunner, wf_runner, self.loadingContext).job(joborder_resolved, output_callback, runtimeContext)

    def make_workflow_step(self,
                           toolpath_object,      # type: Dict[Text, Any]
                           pos,                  # type: int
                           loadingContext,       # type: LoadingContext
                           *argc,
                           **argv
    ):
        # (...) -> WorkflowStep
        return ArvadosWorkflowStep(toolpath_object, pos, loadingContext, self.arvrunner, *argc, **argv)