X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/1586823b65c7ec7656626e491a31f3f9516a4a56..2a3daf14afb93de4d65108019a7a1d35aa1052ad:/sdk/cwl/arvados_cwl/runner.py

diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 69e4f5bd7b..28de7f368a 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -1,3 +1,7 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
 import os
 import urlparse
 from functools import partial
@@ -14,7 +18,7 @@ from cwltool.draft2tool import CommandLineTool
 import cwltool.workflow
 from cwltool.process import get_feature, scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname
 from cwltool.load_tool import fetch_document
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
 from cwltool.utils import aslist
 from cwltool.builder import substitute
 from cwltool.pack import pack
@@ -42,6 +46,22 @@ def trim_anonymous_location(obj):
     if obj.get("location", "").startswith("_:"):
         del obj["location"]
 
+def remove_redundant_fields(obj):
+    for field in ("path", "nameext", "nameroot", "dirname"):
+        if field in obj:
+            del obj[field]
+
+def find_defaults(d, op):
+    if isinstance(d, list):
+        for i in d:
+            find_defaults(i, op)
+    elif isinstance(d, dict):
+        if "default" in d:
+            op(d)
+        else:
+            for i in d.itervalues():
+                find_defaults(i, op)
+
 def upload_dependencies(arvrunner, name, document_loader,
                         workflowobj, uri, loadref_run, include_primary=True):
     """Upload the dependencies of the workflowobj document to Keep.
@@ -97,10 +117,28 @@ def upload_dependencies(arvrunner, name, document_loader,
         for s in workflowobj["$schemas"]:
             sc.append({"class": "File", "location": s})
 
+    def capture_default(obj):
+        remove = [False]
+        def add_default(f):
+            if "location" not in f and "path" in f:
+                f["location"] = f["path"]
+                del f["path"]
+            if "location" in f and not arvrunner.fs_access.exists(f["location"]):
+                # Remove from sc
+                sc[:] = [x for x in sc if x["location"] != f["location"]]
+                # Delete "default" from workflowobj
+                remove[0] = True
+        visit_class(obj["default"], ("File", "Directory"), add_default)
+        if remove[0]:
+            del obj["default"]
+
+    find_defaults(workflowobj, capture_default)
+
     mapper = ArvPathMapper(arvrunner, sc, "",
                            "keep:%s",
                            "keep:%s/%s",
-                           name=name)
+                           name=name,
+                           single_collection=True)
 
     def setloc(p):
         if "location" in p and (not p["location"].startswith("_:")) and (not p["location"].startswith("keep:")):
@@ -128,6 +166,8 @@ def upload_docker(arvrunner, tool):
                 raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
                     "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
             arv_docker_get_image(arvrunner.api, docker_req, True, arvrunner.project_uuid)
+        else:
+            arv_docker_get_image(arvrunner.api, {"dockerPull": "arvados/jobs"}, True, arvrunner.project_uuid)
     elif isinstance(tool, cwltool.workflow.Workflow):
         for s in tool.steps:
             upload_docker(arvrunner, s.embedded_tool)
@@ -151,12 +191,8 @@ def tag_git_version(packed):
             packed["http://schema.org/version"] = githash
 
 
-def upload_job_order(arvrunner, name, tool, job_order):
-    """Upload local files referenced in the input object and return updated input
-    object with 'location' updated to the proper keep references.
-    """
-
-    for t in tool.tool["inputs"]:
+def discover_secondary_files(inputs, job_order):
+    for t in inputs:
         def setSecondary(fileobj):
             if isinstance(fileobj, dict) and fileobj.get("class") == "File":
                 if "secondaryFiles" not in fileobj:
@@ -169,6 +205,13 @@ def upload_job_order(arvrunner, name, tool, job_order):
         if shortname(t["id"]) in job_order and t.get("secondaryFiles"):
             setSecondary(job_order[shortname(t["id"])])
 
+def upload_job_order(arvrunner, name, tool, job_order):
+    """Upload local files referenced in the input object and return updated input
+    object with 'location' updated to the proper keep references.
+    """
+
+    discover_secondary_files(tool.tool["inputs"], job_order)
+
     jobmapper = upload_dependencies(arvrunner,
                                     name,
                                     tool.doc_loader,
@@ -186,7 +229,7 @@ def upload_job_order(arvrunner, name, tool, job_order):
 
     return job_order
 
-def upload_workflow_deps(arvrunner, tool):
+def upload_workflow_deps(arvrunner, tool, override_tools):
     # Ensure that Docker images needed by this workflow are available
 
     upload_docker(arvrunner, tool)
@@ -203,6 +246,7 @@ def upload_workflow_deps(arvrunner, tool):
                                 False,
                                 include_primary=False)
             document_loader.idx[deptool["id"]] = deptool
+            override_tools[deptool["id"]] = json.dumps(deptool)
 
     tool.visit(upload_tool_deps)
 
@@ -246,11 +290,18 @@ class Runner(object):
 
     def __init__(self, runner, tool, job_order, enable_reuse,
                  output_name, output_tags, submit_runner_ram=0,
-                 name=None, on_error=None, submit_runner_image=None):
+                 name=None, on_error=None, submit_runner_image=None,
+                 intermediate_output_ttl=0):
         self.arvrunner = runner
         self.tool = tool
         self.job_order = job_order
         self.running = False
+        if enable_reuse:
+            # If reuse is permitted by command line arguments but
+            # disabled by the workflow itself, disable it.
+            reuse_req, _ = get_feature(self.tool, "http://arvados.org/cwl#ReuseRequirement")
+            if reuse_req:
+                enable_reuse = reuse_req["enableReuse"]
         self.enable_reuse = enable_reuse
         self.uuid = None
         self.final_output = None
@@ -259,6 +310,7 @@ class Runner(object):
         self.name = name
         self.on_error = on_error
         self.jobs_image = submit_runner_image or "arvados/jobs:"+__version__
+        self.intermediate_output_ttl = intermediate_output_ttl
         if submit_runner_ram:
             self.submit_runner_ram = submit_runner_ram