13365: Discover secondaryFiles associated with default file inputs
author Peter Amstutz <pamstutz@veritasgenetics.com>
Tue, 17 Apr 2018 19:23:46 +0000 (15:23 -0400)
committer Peter Amstutz <pamstutz@veritasgenetics.com>
Wed, 2 May 2018 17:59:26 +0000 (13:59 -0400)
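
Previously secondaryFiles were only discovered for File values supplied
in the job order.  Extend discovery to File values provided as input
"defaults" on embedded tools and workflows: upload_dependencies() now
collects the discovered files for upload to Keep, and
upload_workflow_deps() records them per tool in a FileUpdates
(resolved, secondaryFiles) namedtuple so that packed_workflow() can
attach them when it rewrites locations.  Also build the
CollectionFetcher constructor once in ArvCwlRunner.__init__ and reuse
it from arv_make_tool().

A minimal sketch of the discovery behavior (input id and file names
are hypothetical):

    inputs = [{"id": "#main/ref",
               "type": "File",
               "secondaryFiles": [".fai"]}]
    job_order = {"ref": {"class": "File", "location": "ref.fasta"}}
    discovered = {}
    discover_secondary_files(inputs, job_order, discovered)
    # job_order["ref"]["secondaryFiles"] and discovered["ref.fasta"] are
    # now [{"class": "File", "location": "ref.fasta.fai"}]
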
Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <pamstutz@veritasgenetics.com>

sdk/cwl/arvados_cwl/__init__.py
sdk/cwl/arvados_cwl/runner.py

diff --git a/sdk/cwl/arvados_cwl/__init__.py b/sdk/cwl/arvados_cwl/__init__.py
index 12762118e62bc9fa1e2b8cc656056fbb60b01612..8b31ece07be26010c6d2e3b81572bed513f00cfb 100644
--- a/sdk/cwl/arvados_cwl/__init__.py
+++ b/sdk/cwl/arvados_cwl/__init__.py
@@ -99,6 +99,11 @@ class ArvCwlRunner(object):
 
         self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
 
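+        # Build the CollectionFetcher factory once, up front, so it can be
+        # reused by arv_make_tool() below instead of constructing a new
+        # partial for every tool.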
+        self.fetcher_constructor = partial(CollectionFetcher,
+                                           api_client=self.api,
+                                           fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
+                                           num_retries=self.num_retries)
+
         self.work_api = None
         expected_api = ["jobs", "containers"]
         for api in expected_api:
@@ -119,10 +124,7 @@ class ArvCwlRunner(object):
 
     def arv_make_tool(self, toolpath_object, **kwargs):
         kwargs["work_api"] = self.work_api
-        kwargs["fetcher_constructor"] = partial(CollectionFetcher,
-                                                api_client=self.api,
-                                                fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
-                                                num_retries=self.num_retries)
+        kwargs["fetcher_constructor"] = self.fetcher_constructor
         kwargs["resolver"] = partial(collectionResolver, self.api, num_retries=self.num_retries)
         if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
             return ArvadosCommandTool(self, toolpath_object, **kwargs)
diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index bf0eb081290c3ec36b4579ee75ecaa886b0f553a..6491933f2c5548330d0c75582a179c8e747367e6 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -8,10 +8,11 @@ from functools import partial
 import logging
 import json
 import subprocess
+from collections import namedtuple
 
 from StringIO import StringIO
 
-from schema_salad.sourceline import SourceLine
+from schema_salad.sourceline import SourceLine, cmap
 
 from cwltool.command_line_tool import CommandLineTool
 import cwltool.workflow
@@ -45,11 +46,13 @@ def trim_anonymous_location(obj):
     if obj.get("location", "").startswith("_:"):
         del obj["location"]
 
+
 def remove_redundant_fields(obj):
     for field in ("path", "nameext", "nameroot", "dirname"):
         if field in obj:
             del obj[field]
 
+
 def find_defaults(d, op):
     if isinstance(d, list):
         for i in d:
@@ -61,8 +64,27 @@ def find_defaults(d, op):
             for i in d.itervalues():
                 find_defaults(i, op)
 
+
+def discover_secondary_files(inputs, job_order, discovered=None):
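+    """Fill in missing secondaryFiles on File values in job_order.
+
+    For each input parameter that declares secondaryFiles, expand those
+    patterns against every matching File that does not already carry
+    secondaryFiles, recording what was added in the optional 'discovered'
+    dict, keyed by the primary file's location.
+    """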
+    for t in inputs:
+        def setSecondary(fileobj):
+            if isinstance(fileobj, dict) and fileobj.get("class") == "File":
+                if "secondaryFiles" not in fileobj:
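+                    # substitute() expands each pattern against the primary
+                    # location: ".ext" appends, "^.ext" replaces the extension.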
+                    fileobj["secondaryFiles"] = cmap([{"location": substitute(fileobj["location"], sf), "class": "File"} for sf in t["secondaryFiles"]])
+                    if discovered is not None:
+                        discovered[fileobj["location"]] = fileobj["secondaryFiles"]
+
+            if isinstance(fileobj, list):
+                for e in fileobj:
+                    setSecondary(e)
+
+        if shortname(t["id"]) in job_order and t.get("secondaryFiles"):
+            setSecondary(job_order[shortname(t["id"])])
+
+
 def upload_dependencies(arvrunner, name, document_loader,
-                        workflowobj, uri, loadref_run, include_primary=True):
+                        workflowobj, uri, loadref_run,
+                        include_primary=True, discovered_secondaryfiles=None):
     """Upload the dependencies of the workflowobj document to Keep.
 
     Returns a pathmapper object mapping local paths to keep references.  Also
@@ -116,22 +138,33 @@ def upload_dependencies(arvrunner, name, document_loader,
         for s in workflowobj["$schemas"]:
             sc.append({"class": "File", "location": s})
 
-    def capture_default(obj):
+    def visit_default(obj):
         remove = [False]
-        def add_default(f):
+        def ensure_default_location(f):
             if "location" not in f and "path" in f:
                 f["location"] = f["path"]
                 del f["path"]
             if "location" in f and not arvrunner.fs_access.exists(f["location"]):
-                # Remove from sc
+                # Doesn't exist, remove from list of dependencies to upload
                 sc[:] = [x for x in sc if x["location"] != f["location"]]
                 # Delete "default" from workflowobj
                 remove[0] = True
-        visit_class(obj["default"], ("File", "Directory"), add_default)
+        visit_class(obj["default"], ("File", "Directory"), ensure_default_location)
         if remove[0]:
             del obj["default"]
 
-    find_defaults(workflowobj, capture_default)
+    find_defaults(workflowobj, visit_default)
+
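+    # Discover secondaryFiles for File values supplied as input "defaults"
+    # on embedded tools and workflows, and queue them for upload along with
+    # the other dependencies in sc.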
+    discovered = {}
+    def discover_default_secondary_files(obj):
+        discover_secondary_files(obj["inputs"],
+                                 {shortname(t["id"]): t["default"] for t in obj["inputs"] if "default" in t},
+                                 discovered)
+
+    visit_class(workflowobj, ("CommandLineTool", "Workflow"), discover_default_secondary_files)
+
+    for d in discovered:
+        sc.extend(discovered[d])
 
     mapper = ArvPathMapper(arvrunner, sc, "",
                            "keep:%s",
@@ -142,8 +175,13 @@ def upload_dependencies(arvrunner, name, document_loader,
     def setloc(p):
         if "location" in p and (not p["location"].startswith("_:")) and (not p["location"].startswith("keep:")):
             p["location"] = mapper.mapper(p["location"]).resolved
-    adjustFileObjs(workflowobj, setloc)
-    adjustDirObjs(workflowobj, setloc)
+
+    visit_class(workflowobj, ("File", "Directory"), setloc)
+    visit_class(discovered, ("File", "Directory"), setloc)
+
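+    # Report discovered secondaryFiles back to the caller, re-keyed by the
+    # primary file's resolved "keep:" location.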
+    if discovered_secondaryfiles is not None:
+        for d in discovered:
+            discovered_secondaryfiles[mapper.mapper(d).resolved] = discovered[d]
 
     if "$schemas" in workflowobj:
         sch = []
@@ -171,6 +209,7 @@ def upload_docker(arvrunner, tool):
         for s in tool.steps:
             upload_docker(arvrunner, s.embedded_tool)
 
+
 def packed_workflow(arvrunner, tool, merged_map):
     """Create a packed workflow.
 
@@ -189,7 +228,9 @@ def packed_workflow(arvrunner, tool, merged_map):
             if v.get("class") in ("CommandLineTool", "Workflow"):
                 cur_id = rewrite_to_orig.get(v["id"], v["id"])
             if "location" in v and not v["location"].startswith("keep:"):
-                v["location"] = merged_map[cur_id][v["location"]]
+                v["location"] = merged_map[cur_id].resolved[v["location"]]
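+            # Attach any secondaryFiles discovered for this location during
+            # dependency upload.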
+            if "location" in v and v["location"] in merged_map[cur_id].secondaryFiles:
+                v["secondaryFiles"] = merged_map[cur_id].secondaryFiles[v["location"]]
             for l in v:
                 visit(v[l], cur_id)
         if isinstance(v, list):
@@ -198,6 +239,7 @@ def packed_workflow(arvrunner, tool, merged_map):
     visit(packed, None)
     return packed
 
+
 def tag_git_version(packed):
     if tool.tool["id"].startswith("file://"):
         path = os.path.dirname(tool.tool["id"][7:])
@@ -209,20 +251,6 @@ def tag_git_version(packed):
             packed["http://schema.org/version"] = githash
 
 
-def discover_secondary_files(inputs, job_order):
-    for t in inputs:
-        def setSecondary(fileobj):
-            if isinstance(fileobj, dict) and fileobj.get("class") == "File":
-                if "secondaryFiles" not in fileobj:
-                    fileobj["secondaryFiles"] = [{"location": substitute(fileobj["location"], sf), "class": "File"} for sf in t["secondaryFiles"]]
-
-            if isinstance(fileobj, list):
-                for e in fileobj:
-                    setSecondary(e)
-
-        if shortname(t["id"]) in job_order and t.get("secondaryFiles"):
-            setSecondary(job_order[shortname(t["id"])])
-
 def upload_job_order(arvrunner, name, tool, job_order):
     """Upload local files referenced in the input object and return updated input
     object with 'location' updated to the proper keep references.
@@ -247,6 +275,8 @@ def upload_job_order(arvrunner, name, tool, job_order):
 
     return job_order
 
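+# Per-tool record kept in merged_map: "resolved" maps original file locations
+# to keep: references; "secondaryFiles" maps resolved locations to the
+# secondaryFiles discovered for them (see packed_workflow).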
+FileUpdates = namedtuple("FileUpdates", ["resolved", "secondaryFiles"])
+
 def upload_workflow_deps(arvrunner, tool):
     # Ensure that Docker images needed by this workflow are available
 
@@ -258,18 +288,20 @@ def upload_workflow_deps(arvrunner, tool):
 
     def upload_tool_deps(deptool):
         if "id" in deptool:
+            discovered_secondaryfiles = {}
             pm = upload_dependencies(arvrunner,
-                                "%s dependencies" % (shortname(deptool["id"])),
-                                document_loader,
-                                deptool,
-                                deptool["id"],
-                                False,
-                                include_primary=False)
+                                     "%s dependencies" % (shortname(deptool["id"])),
+                                     document_loader,
+                                     deptool,
+                                     deptool["id"],
+                                     False,
+                                     include_primary=False,
+                                     discovered_secondaryfiles=discovered_secondaryfiles)
             document_loader.idx[deptool["id"]] = deptool
             toolmap = {}
             for k,v in pm.items():
                 toolmap[k] = v.resolved
-            merged_map[deptool["id"]] = toolmap
+            merged_map[deptool["id"]] = FileUpdates(toolmap, discovered_secondaryfiles)
 
     tool.visit(upload_tool_deps)