12913: Apply secondaryFiles to inputs to RunInSingleContainer
diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 1c10fe8196aba940495934169d559bb756cfda31..28de7f368a23ccbc52a0cae37fa55d358744d0e1 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -1,3 +1,7 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
 import os
 import urlparse
 from functools import partial
@@ -14,7 +18,7 @@ from cwltool.draft2tool import CommandLineTool
 import cwltool.workflow
 from cwltool.process import get_feature, scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname
 from cwltool.load_tool import fetch_document
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
 from cwltool.utils import aslist
 from cwltool.builder import substitute
 from cwltool.pack import pack
@@ -23,27 +27,41 @@ import arvados.collection
 import ruamel.yaml as yaml
 
 from .arvdocker import arv_docker_get_image
-from .pathmapper import ArvPathMapper
+from .pathmapper import ArvPathMapper, trim_listing
 from ._version import __version__
 from . import done
 
 logger = logging.getLogger('arvados.cwl-runner')
 
-def trim_listing(obj):
-    """Remove 'listing' field from Directory objects that are keep references.
+def trim_anonymous_location(obj):
+    """Remove 'location' field from File and Directory literals.
+
+    To make internal handling easier, literals are assigned a random id for
+    'location'.  However, when writing the record back out, this can break
+reproducibility.  Since it is valid for literals not to have a 'location'
+    field, remove it.
 
-    When Directory objects represent Keep references, it redundant and
-    potentially very expensive to pass fully enumerated Directory objects
-    between instances of cwl-runner (e.g. a submitting a job, or using the
-    RunInSingleContainer feature), so delete the 'listing' field when it is
-    safe to do so.
     """
 
-    if obj.get("location", "").startswith("keep:") and "listing" in obj:
-        del obj["listing"]
     if obj.get("location", "").startswith("_:"):
         del obj["location"]
 
+def remove_redundant_fields(obj):
+    for field in ("path", "nameext", "nameroot", "dirname"):
+        if field in obj:
+            del obj[field]
+
+def find_defaults(d, op):
+    if isinstance(d, list):
+        for i in d:
+            find_defaults(i, op)
+    elif isinstance(d, dict):
+        if "default" in d:
+            op(d)
+        else:
+            for i in d.itervalues():
+                find_defaults(i, op)
+
 def upload_dependencies(arvrunner, name, document_loader,
                         workflowobj, uri, loadref_run, include_primary=True):
     """Upload the dependencies of the workflowobj document to Keep.
@@ -99,10 +117,28 @@ def upload_dependencies(arvrunner, name, document_loader,
         for s in workflowobj["$schemas"]:
             sc.append({"class": "File", "location": s})
 
+    def capture_default(obj):
+        remove = [False]
+        def add_default(f):
+            if "location" not in f and "path" in f:
+                f["location"] = f["path"]
+                del f["path"]
+            if "location" in f and not arvrunner.fs_access.exists(f["location"]):
+                # Remove from sc
+                sc[:] = [x for x in sc if x["location"] != f["location"]]
+                # Delete "default" from workflowobj
+                remove[0] = True
+        visit_class(obj["default"], ("File", "Directory"), add_default)
+        if remove[0]:
+            del obj["default"]
+
+    find_defaults(workflowobj, capture_default)
+
     mapper = ArvPathMapper(arvrunner, sc, "",
                            "keep:%s",
                            "keep:%s/%s",
-                           name=name)
+                           name=name,
+                           single_collection=True)
 
     def setloc(p):
         if "location" in p and (not p["location"].startswith("_:")) and (not p["location"].startswith("keep:")):
@@ -130,6 +166,8 @@ def upload_docker(arvrunner, tool):
                 raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
                     "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
             arv_docker_get_image(arvrunner.api, docker_req, True, arvrunner.project_uuid)
+        else:
+            arv_docker_get_image(arvrunner.api, {"dockerPull": "arvados/jobs"}, True, arvrunner.project_uuid)
     elif isinstance(tool, cwltool.workflow.Workflow):
         for s in tool.steps:
             upload_docker(arvrunner, s.embedded_tool)
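
The new else branch means tools that declare no DockerRequirement still cause the default arvados/jobs image to be fetched into the project, so the runtime environment is present either way. A small hedged sketch of that decision, with a stub in place of arv_docker_get_image() and a made-up image name:

    # Stub for illustration; the real call is
    # arv_docker_get_image(api_client, docker_requirement, pull_image, project_uuid).
    def ensure_image(docker_req):
        req = docker_req if docker_req else {"dockerPull": "arvados/jobs"}
        print("would fetch image: " + req.get("dockerPull", req.get("dockerImageId", "?")))

    ensure_image({"dockerPull": "example/mytool:1.0"})   # image named by the tool
    ensure_image(None)                                   # falls back to arvados/jobs
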
@@ -153,12 +191,8 @@ def tag_git_version(packed):
             packed["http://schema.org/version"] = githash
 
 
-def upload_job_order(arvrunner, name, tool, job_order):
-    """Upload local files referenced in the input object and return updated input
-    object with 'location' updated to the proper keep references.
-    """
-
-    for t in tool.tool["inputs"]:
+def discover_secondary_files(inputs, job_order):
+    for t in inputs:
         def setSecondary(fileobj):
             if isinstance(fileobj, dict) and fileobj.get("class") == "File":
                 if "secondaryFiles" not in fileobj:
@@ -171,6 +205,13 @@ def upload_job_order(arvrunner, name, tool, job_order):
         if shortname(t["id"]) in job_order and t.get("secondaryFiles"):
             setSecondary(job_order[shortname(t["id"])])
 
+def upload_job_order(arvrunner, name, tool, job_order):
+    """Upload local files referenced in the input object and return updated input
+    object with 'location' updated to the proper keep references.
+    """
+
+    discover_secondary_files(tool.tool["inputs"], job_order)
+
     jobmapper = upload_dependencies(arvrunner,
                                     name,
                                     tool.doc_loader,
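
discover_secondary_files (factored out of upload_job_order so that RunInSingleContainer can reuse it) expands the secondaryFiles patterns declared on the tool's input parameters against the File values in the input object. A sketch of the usual CWL pattern convention, reimplemented here only for illustration (the real expansion goes through cwltool's substitute()):

    # Each leading "^" strips one extension from the primary location
    # before the remaining suffix is appended.
    def expand_pattern(location, pattern):
        while pattern.startswith("^"):
            location = location.rsplit(".", 1)[0]
            pattern = pattern[1:]
        return location + pattern

    primary = "keep:abc123+456/sample.bam"   # made-up collection reference
    for pat in (".bai", "^.dict"):
        print(expand_pattern(primary, pat))
    # keep:abc123+456/sample.bam.bai
    # keep:abc123+456/sample.dict
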
@@ -188,7 +229,7 @@ def upload_job_order(arvrunner, name, tool, job_order):
 
     return job_order
 
-def upload_workflow_deps(arvrunner, tool):
+def upload_workflow_deps(arvrunner, tool, override_tools):
     # Ensure that Docker images needed by this workflow are available
 
     upload_docker(arvrunner, tool)
@@ -205,6 +246,7 @@ def upload_workflow_deps(arvrunner, tool):
                                 False,
                                 include_primary=False)
             document_loader.idx[deptool["id"]] = deptool
+            override_tools[deptool["id"]] = json.dumps(deptool)
 
     tool.visit(upload_tool_deps)
 
@@ -248,11 +290,18 @@ class Runner(object):
 
     def __init__(self, runner, tool, job_order, enable_reuse,
                  output_name, output_tags, submit_runner_ram=0,
-                 name=None, on_error=None, submit_runner_image=None):
+                 name=None, on_error=None, submit_runner_image=None,
+                 intermediate_output_ttl=0):
         self.arvrunner = runner
         self.tool = tool
         self.job_order = job_order
         self.running = False
+        if enable_reuse:
+            # If reuse is permitted by command line arguments but
+            # disabled by the workflow itself, disable it.
+            reuse_req, _ = get_feature(self.tool, "http://arvados.org/cwl#ReuseRequirement")
+            if reuse_req:
+                enable_reuse = reuse_req["enableReuse"]
         self.enable_reuse = enable_reuse
         self.uuid = None
         self.final_output = None
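
The reuse check means a tool or workflow can carry an http://arvados.org/cwl#ReuseRequirement hint that turns job reuse off even when the command line allows it; the hint can never re-enable reuse that was disabled on the command line. A hedged sketch of the equivalent logic against a plain document (the real code uses cwltool's get_feature() on the Process object):

    REUSE_URI = "http://arvados.org/cwl#ReuseRequirement"

    def effective_enable_reuse(tool_doc, enable_reuse):
        if not enable_reuse:
            return False              # command line already disabled reuse
        for req in tool_doc.get("hints", []) + tool_doc.get("requirements", []):
            if req.get("class") == REUSE_URI:
                return req["enableReuse"]
        return enable_reuse

    tool_doc = {"hints": [{"class": REUSE_URI, "enableReuse": False}]}
    print(effective_enable_reuse(tool_doc, True))   # -> False
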
@@ -261,11 +310,12 @@ class Runner(object):
         self.name = name
         self.on_error = on_error
         self.jobs_image = submit_runner_image or "arvados/jobs:"+__version__
+        self.intermediate_output_ttl = intermediate_output_ttl
 
         if submit_runner_ram:
             self.submit_runner_ram = submit_runner_ram
         else:
-            self.submit_runner_ram = 1024
+            self.submit_runner_ram = 3000
 
         if self.submit_runner_ram <= 0:
             raise Exception("Value of --submit-runner-ram must be greater than zero")