12913: Apply secondaryFiles to inputs to RunInSingleContainer
[arvados.git] / sdk / cwl / arvados_cwl / runner.py
index ddeac3ab0691e0d33b05b2ce774a2805a8a13deb..28de7f368a23ccbc52a0cae37fa55d358744d0e1 100644 (file)
@@ -1,12 +1,15 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
 import os
 import urlparse
 from functools import partial
 import logging
 import json
-import re
 import subprocess
 
-from cStringIO import StringIO
+from StringIO import StringIO
 
 from schema_salad.sourceline import SourceLine
 
@@ -15,7 +18,7 @@ from cwltool.draft2tool import CommandLineTool
 import cwltool.workflow
 from cwltool.process import get_feature, scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname
 from cwltool.load_tool import fetch_document
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
 from cwltool.utils import aslist
 from cwltool.builder import substitute
 from cwltool.pack import pack
@@ -24,29 +27,41 @@ import arvados.collection
 import ruamel.yaml as yaml
 
 from .arvdocker import arv_docker_get_image
-from .pathmapper import ArvPathMapper
+from .pathmapper import ArvPathMapper, trim_listing
 from ._version import __version__
 from . import done
 
 logger = logging.getLogger('arvados.cwl-runner')
 
-cwltool.draft2tool.ACCEPTLIST_RE = re.compile(r".*")
+def trim_anonymous_location(obj):
+    """Remove 'location' field from File and Directory literals.
 
-def trim_listing(obj):
-    """Remove 'listing' field from Directory objects that are keep references.
+    To make internal handling easier, literals are assigned a random id for
+    'location'.  However, when writing the record back out, this can break
+    reproducibility.  Since it is valid for literals not to have a 'location'
+    field, remove it.
 
-    When Directory objects represent Keep references, it redundant and
-    potentially very expensive to pass fully enumerated Directory objects
-    between instances of cwl-runner (e.g. a submitting a job, or using the
-    RunInSingleContainer feature), so delete the 'listing' field when it is
-    safe to do so.
     """
 
-    if obj.get("location", "").startswith("keep:") and "listing" in obj:
-        del obj["listing"]
     if obj.get("location", "").startswith("_:"):
         del obj["location"]
 
+def remove_redundant_fields(obj):
+    for field in ("path", "nameext", "nameroot", "dirname"):
+        if field in obj:
+            del obj[field]
+
+def find_defaults(d, op):
+    if isinstance(d, list):
+        for i in d:
+            find_defaults(i, op)
+    elif isinstance(d, dict):
+        if "default" in d:
+            op(d)
+        else:
+            for i in d.itervalues():
+                find_defaults(i, op)
+
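
find_defaults walks the parsed document and hands every mapping that carries a "default" key to the callback, without descending into the default itself. A small sketch of the traversal (the document is hypothetical):

    doc = {"inputs": [
        {"id": "a", "type": "File",
         "default": {"class": "File", "path": "/tmp/x"}},
        {"id": "b", "type": "string"},
    ]}
    found = []
    find_defaults(doc, found.append)
    # found contains only the first input, the one declaring a "default"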
 def upload_dependencies(arvrunner, name, document_loader,
                         workflowobj, uri, loadref_run, include_primary=True):
     """Upload the dependencies of the workflowobj document to Keep.
@@ -98,10 +113,32 @@ def upload_dependencies(arvrunner, name, document_loader,
     if include_primary and "id" in workflowobj:
         sc.append({"class": "File", "location": workflowobj["id"]})
 
+    if "$schemas" in workflowobj:
+        for s in workflowobj["$schemas"]:
+            sc.append({"class": "File", "location": s})
+
+    def capture_default(obj):
+        remove = [False]
+        def add_default(f):
+            if "location" not in f and "path" in f:
+                f["location"] = f["path"]
+                del f["path"]
+            if "location" in f and not arvrunner.fs_access.exists(f["location"]):
+                # Remove from sc
+                sc[:] = [x for x in sc if x["location"] != f["location"]]
+                # Delete "default" from workflowobj
+                remove[0] = True
+        visit_class(obj["default"], ("File", "Directory"), add_default)
+        if remove[0]:
+            del obj["default"]
+
+    find_defaults(workflowobj, capture_default)
+
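
The net effect on a workflow input whose default points at a file that is no longer present: assuming fs_access.exists() returns False for it, the stale default is deleted and any matching entry is filtered out of the upload set (sc), rather than failing the upload. A before/after sketch:

    # before: {"id": "ref", "type": "File",
    #          "default": {"class": "File", "path": "/old/ref.fa"}}
    # after:  {"id": "ref", "type": "File"}
    #
    # Defaults that do resolve keep their reference (re-keyed from "path"
    # to "location") and are uploaded by ArvPathMapper with everything else.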
     mapper = ArvPathMapper(arvrunner, sc, "",
                            "keep:%s",
                            "keep:%s/%s",
-                           name=name)
+                           name=name,
+                           single_collection=True)
 
     def setloc(p):
         if "location" in p and (not p["location"].startswith("_:")) and (not p["location"].startswith("keep:")):
@@ -109,11 +146,18 @@ def upload_dependencies(arvrunner, name, document_loader,
     adjustFileObjs(workflowobj, setloc)
     adjustDirObjs(workflowobj, setloc)
 
+    if "$schemas" in workflowobj:
+        sch = []
+        for s in workflowobj["$schemas"]:
+            sch.append(mapper.mapper(s).resolved)
+        workflowobj["$schemas"] = sch
+
     return mapper
 
 
 def upload_docker(arvrunner, tool):
-    """Visitor which uploads Docker images referenced in CommandLineTool objects."""
+    """Uploads Docker images used in CommandLineTool objects."""
+
     if isinstance(tool, CommandLineTool):
         (docker_req, docker_is_req) = get_feature(tool, "DockerRequirement")
         if docker_req:
@@ -122,6 +166,11 @@ def upload_docker(arvrunner, tool):
                 raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
                     "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
             arv_docker_get_image(arvrunner.api, docker_req, True, arvrunner.project_uuid)
+        else:
+            arv_docker_get_image(arvrunner.api, {"dockerPull": "arvados/jobs"}, True, arvrunner.project_uuid)
+    elif isinstance(tool, cwltool.workflow.Workflow):
+        for s in tool.steps:
+            upload_docker(arvrunner, s.embedded_tool)
 
 def packed_workflow(arvrunner, tool):
     """Create a packed workflow.
@@ -142,12 +191,8 @@ def tag_git_version(packed):
             packed["http://schema.org/version"] = githash
 
 
-def upload_job_order(arvrunner, name, tool, job_order):
-    """Upload local files referenced in the input object and return updated input
-    object with 'location' updated to the proper keep references.
-    """
-
-    for t in tool.tool["inputs"]:
+def discover_secondary_files(inputs, job_order):
+    for t in inputs:
         def setSecondary(fileobj):
             if isinstance(fileobj, dict) and fileobj.get("class") == "File":
                 if "secondaryFiles" not in fileobj:
@@ -160,6 +205,13 @@ def upload_job_order(arvrunner, name, tool, job_order):
         if shortname(t["id"]) in job_order and t.get("secondaryFiles"):
             setSecondary(job_order[shortname(t["id"])])
 
+def upload_job_order(arvrunner, name, tool, job_order):
+    """Upload local files referenced in the input object and return updated input
+    object with 'location' updated to the proper keep references.
+    """
+
+    discover_secondary_files(tool.tool["inputs"], job_order)
+
     jobmapper = upload_dependencies(arvrunner,
                                     name,
                                     tool.doc_loader,
@@ -177,21 +229,24 @@ def upload_job_order(arvrunner, name, tool, job_order):
 
     return job_order
 
-def upload_workflow_deps(arvrunner, tool):
+def upload_workflow_deps(arvrunner, tool, override_tools):
     # Ensure that Docker images needed by this workflow are available
-    tool.visit(partial(upload_docker, arvrunner))
+
+    upload_docker(arvrunner, tool)
 
     document_loader = tool.doc_loader
 
     def upload_tool_deps(deptool):
-        upload_dependencies(arvrunner,
-                            "%s dependencies" % (shortname(deptool["id"])),
-                            document_loader,
-                            deptool,
-                            deptool["id"],
-                            False,
-                            include_primary=False)
-        document_loader.idx[deptool["id"]] = deptool
+        if "id" in deptool:
+            upload_dependencies(arvrunner,
+                                "%s dependencies" % (shortname(deptool["id"])),
+                                document_loader,
+                                deptool,
+                                deptool["id"],
+                                False,
+                                include_primary=False)
+            document_loader.idx[deptool["id"]] = deptool
+            override_tools[deptool["id"]] = json.dumps(deptool)
 
     tool.visit(upload_tool_deps)
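
After the visit, override_tools holds a JSON snapshot of each tool document keyed by its id, so a later stage can reinject exactly the documents whose dependencies were uploaded here. A sketch of what the caller ends up with (the id is hypothetical):

    override_tools = {}
    upload_workflow_deps(arvrunner, tool, override_tools)
    # e.g. {"file:///work/sort.cwl": '{"class": "CommandLineTool", ...}'}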
 
@@ -204,17 +259,49 @@ def arvados_jobs_image(arvrunner, img):
         raise Exception("Docker image %s is not available\n%s" % (img, e) )
     return img
 
+def upload_workflow_collection(arvrunner, name, packed):
+    collection = arvados.collection.Collection(api_client=arvrunner.api,
+                                               keep_client=arvrunner.keep_client,
+                                               num_retries=arvrunner.num_retries)
+    with collection.open("workflow.cwl", "w") as f:
+        f.write(json.dumps(packed, indent=2, sort_keys=True, separators=(',',': ')))
+
+    filters = [["portable_data_hash", "=", collection.portable_data_hash()],
+               ["name", "like", name+"%"]]
+    if arvrunner.project_uuid:
+        filters.append(["owner_uuid", "=", arvrunner.project_uuid])
+    exists = arvrunner.api.collections().list(filters=filters).execute(num_retries=arvrunner.num_retries)
+
+    if exists["items"]:
+        logger.info("Using collection %s", exists["items"][0]["uuid"])
+    else:
+        collection.save_new(name=name,
+                            owner_uuid=arvrunner.project_uuid,
+                            ensure_unique_name=True,
+                            num_retries=arvrunner.num_retries)
+        logger.info("Uploaded to %s", collection.manifest_locator())
+
+    return collection.portable_data_hash()
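+
+# Because the existence check filters on the collection's
+# portable_data_hash (plus a name prefix and, when set, the project),
+# resubmitting an unchanged packed workflow finds the previous copy and
+# skips the save.  Usage is just:
+#
+#     pdh = upload_workflow_collection(arvrunner, "my-workflow", packed)
+#     # same packed content -> same portable_data_hash -> the existing
+#     # collection is reused rather than saved again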
+
+
 class Runner(object):
     """Base class for runner processes, which submit an instance of
     arvados-cwl-runner and wait for the final result."""
 
     def __init__(self, runner, tool, job_order, enable_reuse,
                  output_name, output_tags, submit_runner_ram=0,
-                 name=None, on_error=None, submit_runner_image=None):
+                 name=None, on_error=None, submit_runner_image=None,
+                 intermediate_output_ttl=0):
         self.arvrunner = runner
         self.tool = tool
         self.job_order = job_order
         self.running = False
+        if enable_reuse:
+            # If reuse is permitted by command line arguments but
+            # disabled by the workflow itself, disable it.
+            reuse_req, _ = get_feature(self.tool, "http://arvados.org/cwl#ReuseRequirement")
+            if reuse_req:
+                enable_reuse = reuse_req["enableReuse"]
         self.enable_reuse = enable_reuse
         self.uuid = None
         self.final_output = None
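
The ReuseRequirement check above lets a workflow turn off job reuse even when --enable-reuse was given on the command line. Expressed as the parsed structure get_feature() would see, the hint looks roughly like this (a hedged sketch of the Arvados extension):

    hints = [{
        "class": "http://arvados.org/cwl#ReuseRequirement",
        "enableReuse": False,  # overrides command-line reuse for this tool
    }]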
@@ -223,11 +310,12 @@ class Runner(object):
         self.name = name
         self.on_error = on_error
         self.jobs_image = submit_runner_image or "arvados/jobs:"+__version__
+        self.intermediate_output_ttl = intermediate_output_ttl
 
         if submit_runner_ram:
             self.submit_runner_ram = submit_runner_ram
         else:
-            self.submit_runner_ram = 1024
+            self.submit_runner_ram = 3000
 
         if self.submit_runner_ram <= 0:
             raise Exception("Value of --submit-runner-ram must be greater than zero")