X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/8e693a9981f03d229ff2bff7dd6e5d06e0790c19..c22d90571a1fcb4b52e5387a791e3aefff5be6af:/sdk/cwl/arvados_cwl/arvworkflow.py

diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index ae90625102..4fe82a6fe1 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -2,27 +2,33 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+from past.builtins import basestring
+from future.utils import viewitems
+
 import os
 import json
 import copy
 import logging
 
 from schema_salad.sourceline import SourceLine, cmap
+import schema_salad.ref_resolver
 
 from cwltool.pack import pack
-from cwltool.load_tool import fetch_document
+from cwltool.load_tool import fetch_document, resolve_and_validate_document
 from cwltool.process import shortname
-from cwltool.workflow import Workflow, WorkflowException
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
-from cwltool.builder import Builder
+from cwltool.workflow import Workflow, WorkflowException, WorkflowStep
+from cwltool.utils import adjustFileObjs, adjustDirObjs, visit_class, normalizeFilesDirs
 from cwltool.context import LoadingContext
 
 import ruamel.yaml as yaml
 
 from .runner import (upload_dependencies, packed_workflow, upload_workflow_collection,
-                     trim_anonymous_location, remove_redundant_fields, discover_secondary_files)
+                     trim_anonymous_location, remove_redundant_fields, discover_secondary_files,
+                     make_builder, arvados_jobs_image)
 from .pathmapper import ArvPathMapper, trim_listing
-from .arvtool import ArvadosCommandTool
+from .arvtool import ArvadosCommandTool, set_cluster_target
+from ._version import __version__
+
 from .perf import Perf
 
 logger = logging.getLogger('arvados.cwl-runner')
@@ -32,7 +38,8 @@ max_res_pars = ("coresMin", "coresMax", "ramMin", "ramMax", "tmpdirMin", "tmpdir
 sum_res_pars = ("outdirMin", "outdirMax")
 
 def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
-                    submit_runner_ram=0, name=None, merged_map=None):
+                    submit_runner_ram=0, name=None, merged_map=None,
+                    submit_runner_image=None):
 
     packed = packed_workflow(arvRunner, tool, merged_map)
 
@@ -52,18 +59,25 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
     upload_dependencies(arvRunner, name, tool.doc_loader,
                         packed, tool.tool["id"], False)
 
+    wf_runner_resources = None
+
+    hints = main.get("hints", [])
+    found = False
+    for h in hints:
+        if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
+            wf_runner_resources = h
+            found = True
+            break
+    if not found:
+        wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
+        hints.append(wf_runner_resources)
+
+    wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner, submit_runner_image or "arvados/jobs:"+__version__)
+
     if submit_runner_ram:
-        hints = main.get("hints", [])
-        found = False
-        for h in hints:
-            if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
-                h["ramMin"] = submit_runner_ram
-                found = True
-                break
-        if not found:
-            hints.append({"class": "http://arvados.org/cwl#WorkflowRunnerResources",
-                          "ramMin": submit_runner_ram})
-        main["hints"] = hints
+        wf_runner_resources["ramMin"] = submit_runner_ram
+
+    main["hints"] = hints
 
     body = {
         "workflow": {
@@ -118,171 +132,221 @@ def get_overall_res_req(res_reqs):
     overall_res_req["class"] = "ResourceRequirement"
     return cmap(overall_res_req)
 
+class ArvadosWorkflowStep(WorkflowStep):
+    def __init__(self,
+                 toolpath_object,      # type: Dict[Text, Any]
+                 pos,                  # type: int
+                 loadingContext,       # type: LoadingContext
+                 arvrunner,
+                 *argc,
+                 **argv
+                ):  # type: (...) -> None
+
+        super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
+        self.tool["class"] = "WorkflowStep"
+        self.arvrunner = arvrunner
+
+    def job(self, joborder, output_callback, runtimeContext):
+        runtimeContext = runtimeContext.copy()
+        runtimeContext.toplevel = True  # Preserve behavior for #13365
+
+        builder = make_builder({shortname(k): v for k,v in viewitems(joborder)}, self.hints, self.requirements,
+                               runtimeContext, self.metadata)
+        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
+        return super(ArvadosWorkflowStep, self).job(joborder, output_callback, runtimeContext)
+
+
 class ArvadosWorkflow(Workflow):
     """Wrap cwltool Workflow to override selected methods."""
 
     def __init__(self, arvrunner, toolpath_object, loadingContext):
-        super(ArvadosWorkflow, self).__init__(toolpath_object, loadingContext)
         self.arvrunner = arvrunner
         self.wf_pdh = None
         self.dynamic_resource_req = []
         self.static_resource_req = []
         self.wf_reffiles = []
         self.loadingContext = loadingContext
+        super(ArvadosWorkflow, self).__init__(toolpath_object, loadingContext)
+        self.cluster_target_req, _ = self.get_requirement("http://arvados.org/cwl#ClusterTarget")
 
     def job(self, joborder, output_callback, runtimeContext):
+
+        builder = make_builder(joborder, self.hints, self.requirements, runtimeContext, self.metadata)
+        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
+
         req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
-        if req:
-            with SourceLine(self.tool, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
-                if "id" not in self.tool:
-                    raise WorkflowException("%s object must have 'id'" % (self.tool["class"]))
-            document_loader, workflowobj, uri = (self.doc_loader, self.doc_loader.fetch(self.tool["id"]), self.tool["id"])
+        if not req:
+            return super(ArvadosWorkflow, self).job(joborder, output_callback, runtimeContext)
 
-            discover_secondary_files(self.tool["inputs"], joborder)
+        # RunInSingleContainer is true
+
+        with SourceLine(self.tool, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
+            if "id" not in self.tool:
+                raise WorkflowException("%s object must have 'id'" % (self.tool["class"]))
+
+        discover_secondary_files(self.arvrunner.fs_access, builder,
+                                 self.tool["inputs"], joborder)
+        normalizeFilesDirs(joborder)
+
+        with Perf(metrics, "subworkflow upload_deps"):
+            upload_dependencies(self.arvrunner,
+                                os.path.basename(joborder.get("id", "#")),
+                                self.doc_loader,
+                                joborder,
+                                joborder.get("id", "#"),
+                                False)
+
+        if self.wf_pdh is None:
+            packed = pack(self.loadingContext, self.tool["id"], loader=self.doc_loader)
+
+            for p in packed["$graph"]:
+                if p["id"] == "#main":
+                    p["requirements"] = dedup_reqs(self.requirements)
+                    p["hints"] = dedup_reqs(self.hints)
+
+            def visit(item):
+                if "requirements" in item:
+                    item["requirements"] = [i for i in item["requirements"] if i["class"] != "DockerRequirement"]
+                for t in ("hints", "requirements"):
+                    if t not in item:
+                        continue
+                    for req in item[t]:
+                        if req["class"] == "ResourceRequirement":
+                            dyn = False
+                            for k in max_res_pars + sum_res_pars:
+                                if k in req:
+                                    if isinstance(req[k], basestring):
+                                        if item["id"] == "#main":
+                                            # only the top-level requirements/hints may contain expressions
+                                            self.dynamic_resource_req.append(req)
+                                            dyn = True
+                                            break
+                                        else:
+                                            with SourceLine(req, k, WorkflowException):
+                                                raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
+                            if not dyn:
+                                self.static_resource_req.append(req)
+
+            visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)
+
+            if self.static_resource_req:
+                self.static_resource_req = [get_overall_res_req(self.static_resource_req)]
 
-            with Perf(metrics, "subworkflow upload_deps"):
                 upload_dependencies(self.arvrunner,
-                                    os.path.basename(joborder.get("id", "#")),
-                                    document_loader,
-                                    joborder,
-                                    joborder.get("id", "#"),
+                                    runtimeContext.name,
+                                    self.doc_loader,
+                                    packed,
+                                    self.tool["id"],
                                     False)
 
-            if self.wf_pdh is None:
-                workflowobj["requirements"] = dedup_reqs(self.requirements)
-                workflowobj["hints"] = dedup_reqs(self.hints)
-
-                packed = pack(document_loader, workflowobj, uri, self.metadata)
-
-                builder = Builder(joborder,
-                                  requirements=workflowobj["requirements"],
-                                  hints=workflowobj["hints"],
-                                  resources={})
-
-                def visit(item):
-                    for t in ("hints", "requirements"):
-                        if t not in item:
-                            continue
-                        for req in item[t]:
-                            if req["class"] == "ResourceRequirement":
-                                dyn = False
-                                for k in max_res_pars + sum_res_pars:
-                                    if k in req:
-                                        if isinstance(req[k], basestring):
-                                            if item["id"] == "#main":
-                                                # only the top-level requirements/hints may contain expressions
-                                                self.dynamic_resource_req.append(req)
-                                                dyn = True
-                                                break
-                                            else:
-                                                with SourceLine(req, k, WorkflowException):
-                                                    raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
-                                if not dyn:
-                                    self.static_resource_req.append(req)
-
-                visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)
-
-                if self.static_resource_req:
-                    self.static_resource_req = [get_overall_res_req(self.static_resource_req)]
-
-                upload_dependencies(self.arvrunner,
-                                    runtimeContext.name,
-                                    document_loader,
-                                    packed,
-                                    uri,
-                                    False)
-
-                # Discover files/directories referenced by the
-                # workflow (mainly "default" values)
-                visit_class(packed, ("File", "Directory"), self.wf_reffiles.append)
-
-
-            if self.dynamic_resource_req:
-                builder = Builder(joborder,
-                                  requirements=self.requirements,
-                                  hints=self.hints,
-                                  resources={})
-
-                # Evaluate dynamic resource requirements using current builder
-                rs = copy.copy(self.static_resource_req)
-                for dyn_rs in self.dynamic_resource_req:
-                    eval_req = {"class": "ResourceRequirement"}
-                    for a in max_res_pars + sum_res_pars:
-                        if a in dyn_rs:
-                            eval_req[a] = builder.do_eval(dyn_rs[a])
-                    rs.append(eval_req)
-                job_res_reqs = [get_overall_res_req(rs)]
-            else:
-                job_res_reqs = self.static_resource_req
-
-            with Perf(metrics, "subworkflow adjust"):
-                joborder_resolved = copy.deepcopy(joborder)
-                joborder_keepmount = copy.deepcopy(joborder)
-
-                reffiles = []
-                visit_class(joborder_keepmount, ("File", "Directory"), reffiles.append)
-
-                mapper = ArvPathMapper(self.arvrunner, reffiles+self.wf_reffiles, runtimeContext.basedir,
-                                       "/keep/%s",
-                                       "/keep/%s/%s")
-
-                # For containers API, we need to make sure any extra
-                # referenced files (ie referenced by the workflow but
-                # not in the inputs) are included in the mounts.
-                if self.wf_reffiles:
-                    runtimeContext = runtimeContext.copy()
-                    runtimeContext.extra_reffiles = copy.deepcopy(self.wf_reffiles)
-
-                def keepmount(obj):
-                    remove_redundant_fields(obj)
-                    with SourceLine(obj, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
-                        if "location" not in obj:
-                            raise WorkflowException("%s object is missing required 'location' field: %s" % (obj["class"], obj))
-                    with SourceLine(obj, "location", WorkflowException, logger.isEnabledFor(logging.DEBUG)):
-                        if obj["location"].startswith("keep:"):
-                            obj["location"] = mapper.mapper(obj["location"]).target
-                            if "listing" in obj:
-                                del obj["listing"]
-                        elif obj["location"].startswith("_:"):
-                            del obj["location"]
-                        else:
-                            raise WorkflowException("Location is not a keep reference or a literal: '%s'" % obj["location"])
-
-                visit_class(joborder_keepmount, ("File", "Directory"), keepmount)
-
-                def resolved(obj):
-                    if obj["location"].startswith("keep:"):
-                        obj["location"] = mapper.mapper(obj["location"]).resolved
-
-                visit_class(joborder_resolved, ("File", "Directory"), resolved)
-
-                if self.wf_pdh is None:
-                    adjustFileObjs(packed, keepmount)
-                    adjustDirObjs(packed, keepmount)
-                    self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed)
-
-            wf_runner = cmap({
-                "class": "CommandLineTool",
-                "baseCommand": "cwltool",
-                "inputs": self.tool["inputs"],
-                "outputs": self.tool["outputs"],
-                "stdout": "cwl.output.json",
-                "requirements": self.requirements+job_res_reqs+[
-                    {"class": "InlineJavascriptRequirement"},
-                    {
-                    "class": "InitialWorkDirRequirement",
-                    "listing": [{
-                            "entryname": "workflow.cwl",
-                            "entry": '$({"class": "File", "location": "keep:%s/workflow.cwl"})' % self.wf_pdh
-                        }, {
-                            "entryname": "cwl.input.yml",
-                            "entry": json.dumps(joborder_keepmount, indent=2, sort_keys=True, separators=(',',': ')).replace("\\", "\\\\").replace('$(', '\$(').replace('${', '\${')
-                        }]
-                }],
-                "hints": self.hints,
-                "arguments": ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl#main", "cwl.input.yml"],
-                "id": "#"
-            })
-            return ArvadosCommandTool(self.arvrunner, wf_runner, self.loadingContext).job(joborder_resolved, output_callback, runtimeContext)
+            # Discover files/directories referenced by the
+            # workflow (mainly "default" values)
+            visit_class(packed, ("File", "Directory"), self.wf_reffiles.append)
+
+
+        if self.dynamic_resource_req:
+            # Evaluate dynamic resource requirements using current builder
+            rs = copy.copy(self.static_resource_req)
+            for dyn_rs in self.dynamic_resource_req:
+                eval_req = {"class": "ResourceRequirement"}
+                for a in max_res_pars + sum_res_pars:
+                    if a in dyn_rs:
+                        eval_req[a] = builder.do_eval(dyn_rs[a])
+                rs.append(eval_req)
+            job_res_reqs = [get_overall_res_req(rs)]
         else:
-                return super(ArvadosWorkflow, self).job(joborder, output_callback, runtimeContext)
+            job_res_reqs = self.static_resource_req
+
+        with Perf(metrics, "subworkflow adjust"):
+            joborder_resolved = copy.deepcopy(joborder)
+            joborder_keepmount = copy.deepcopy(joborder)
+
+            reffiles = []
+            visit_class(joborder_keepmount, ("File", "Directory"), reffiles.append)
+
+            mapper = ArvPathMapper(self.arvrunner, reffiles+self.wf_reffiles, runtimeContext.basedir,
+                                   "/keep/%s",
+                                   "/keep/%s/%s")
+
+            # For containers API, we need to make sure any extra
+            # referenced files (ie referenced by the workflow but
+            # not in the inputs) are included in the mounts.
+ if self.wf_reffiles: + runtimeContext = runtimeContext.copy() + runtimeContext.extra_reffiles = copy.deepcopy(self.wf_reffiles) + + def keepmount(obj): + remove_redundant_fields(obj) + with SourceLine(obj, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)): + if "location" not in obj: + raise WorkflowException("%s object is missing required 'location' field: %s" % (obj["class"], obj)) + with SourceLine(obj, "location", WorkflowException, logger.isEnabledFor(logging.DEBUG)): + if obj["location"].startswith("keep:"): + obj["location"] = mapper.mapper(obj["location"]).target + if "listing" in obj: + del obj["listing"] + elif obj["location"].startswith("_:"): + del obj["location"] + else: + raise WorkflowException("Location is not a keep reference or a literal: '%s'" % obj["location"]) + + visit_class(joborder_keepmount, ("File", "Directory"), keepmount) + + def resolved(obj): + if obj["location"].startswith("keep:"): + obj["location"] = mapper.mapper(obj["location"]).resolved + + visit_class(joborder_resolved, ("File", "Directory"), resolved) + + if self.wf_pdh is None: + adjustFileObjs(packed, keepmount) + adjustDirObjs(packed, keepmount) + self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed) + + self.loadingContext = self.loadingContext.copy() + self.loadingContext.metadata = self.loadingContext.metadata.copy() + self.loadingContext.metadata["http://commonwl.org/cwltool#original_cwlVersion"] = "v1.0" + + if len(job_res_reqs) == 1: + # RAM request needs to be at least 128 MiB or the workflow + # runner itself won't run reliably. + if job_res_reqs[0].get("ramMin", 1024) < 128: + job_res_reqs[0]["ramMin"] = 128 + + arguments = ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl", "cwl.input.yml"] + if runtimeContext.debug: + arguments.insert(0, '--debug') + + wf_runner = cmap({ + "class": "CommandLineTool", + "baseCommand": "cwltool", + "inputs": self.tool["inputs"], + "outputs": self.tool["outputs"], + "stdout": "cwl.output.json", + "requirements": self.requirements+job_res_reqs+[ + {"class": "InlineJavascriptRequirement"}, + { + "class": "InitialWorkDirRequirement", + "listing": [{ + "entryname": "workflow.cwl", + "entry": '$({"class": "File", "location": "keep:%s/workflow.cwl"})' % self.wf_pdh + }, { + "entryname": "cwl.input.yml", + "entry": json.dumps(joborder_keepmount, indent=2, sort_keys=True, separators=(',',': ')).replace("\\", "\\\\").replace('$(', '\$(').replace('${', '\${') + }] + }], + "hints": self.hints, + "arguments": arguments, + "id": "#" + }) + return ArvadosCommandTool(self.arvrunner, wf_runner, self.loadingContext).job(joborder_resolved, output_callback, runtimeContext) + + def make_workflow_step(self, + toolpath_object, # type: Dict[Text, Any] + pos, # type: int + loadingContext, # type: LoadingContext + *argc, + **argv + ): + # (...) -> WorkflowStep + return ArvadosWorkflowStep(toolpath_object, pos, loadingContext, self.arvrunner, *argc, **argv)
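The upload_workflow() hunk above restructures how the
http://arvados.org/cwl#WorkflowRunnerResources hint is handled: the hint is
now looked up or created unconditionally so that acrContainerImage is always
recorded on #main, while ramMin is still only set when submit_runner_ram is
given. The sketch below illustrates that get-or-create pattern in isolation.
It is a minimal illustration, not the shipped code: the helper name
ensure_runner_resources_hint and the literal image tag are hypothetical, and
the real patch derives the image from arvados_jobs_image(arvRunner,
submit_runner_image or "arvados/jobs:"+__version__).

# Minimal sketch (assumptions noted above) of the get-or-create hint pattern.
WF_RUNNER_RES = "http://arvados.org/cwl#WorkflowRunnerResources"

def ensure_runner_resources_hint(main, acr_image, submit_runner_ram=0):
    # Reuse an existing WorkflowRunnerResources hint when present.
    hints = main.get("hints", [])
    wf_runner_resources = None
    for h in hints:
        if h["class"] == WF_RUNNER_RES:
            wf_runner_resources = h
            break
    # Otherwise create one, so acrContainerImage can always be recorded.
    if wf_runner_resources is None:
        wf_runner_resources = {"class": WF_RUNNER_RES}
        hints.append(wf_runner_resources)
    wf_runner_resources["acrContainerImage"] = acr_image  # always set
    if submit_runner_ram:
        wf_runner_resources["ramMin"] = submit_runner_ram  # still optional
    main["hints"] = hints
    return wf_runner_resources

if __name__ == "__main__":
    # "arvados/jobs:2.1.0" is a hypothetical image tag used for illustration.
    main = {"class": "Workflow"}
    ensure_runner_resources_hint(main, "arvados/jobs:2.1.0", submit_runner_ram=2048)
    assert main["hints"][0]["acrContainerImage"] == "arvados/jobs:2.1.0"
    assert main["hints"][0]["ramMin"] == 2048

Because hints holds dict references, mutating the found-or-appended entry is
enough; the same object is what ends up in main["hints"] on #main.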