Merge branch '13301-cwl-resource-scatter' closes #13301
diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 28de7f368a23ccbc52a0cae37fa55d358744d0e1..053c99502bf06e0c5829b67a4563bfd7544b8c1a 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -13,8 +13,7 @@ from StringIO import StringIO
 
 from schema_salad.sourceline import SourceLine
 
-import cwltool.draft2tool
-from cwltool.draft2tool import CommandLineTool
+from cwltool.command_line_tool import CommandLineTool
 import cwltool.workflow
 from cwltool.process import get_feature, scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname
 from cwltool.load_tool import fetch_document
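
The import change above tracks cwltool's rename of the `draft2tool` module to `command_line_tool`. A minimal compatibility sketch, assuming the old module path still exists in pre-rename cwltool releases; this commit simply pins the new path instead:

```python
# Sketch only, not part of this commit: tolerate both cwltool layouts.
try:
    from cwltool.command_line_tool import CommandLineTool  # newer cwltool
except ImportError:
    from cwltool.draft2tool import CommandLineTool  # pre-rename cwltool
```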
@@ -161,7 +160,7 @@ def upload_docker(arvrunner, tool):
     if isinstance(tool, CommandLineTool):
         (docker_req, docker_is_req) = get_feature(tool, "DockerRequirement")
         if docker_req:
-            if docker_req.get("dockerOutputDirectory"):
+            if docker_req.get("dockerOutputDirectory") and arvrunner.work_api != "containers":
                 # TODO: can be supported by containers API, but not jobs API.
                 raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
                     "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
@@ -172,13 +171,32 @@ def upload_docker(arvrunner, tool):
         for s in tool.steps:
             upload_docker(arvrunner, s.embedded_tool)
 
-def packed_workflow(arvrunner, tool):
+def packed_workflow(arvrunner, tool, merged_map):
     """Create a packed workflow.
 
     A "packed" workflow is one where all the components have been combined into a single document."""
 
-    return pack(tool.doc_loader, tool.doc_loader.fetch(tool.tool["id"]),
-                tool.tool["id"], tool.metadata)
+    rewrites = {}
+    packed = pack(tool.doc_loader, tool.doc_loader.fetch(tool.tool["id"]),
+                  tool.tool["id"], tool.metadata, rewrite_out=rewrites)
+
+    rewrite_to_orig = {}
+    for k, v in rewrites.items():
+        rewrite_to_orig[v] = k
+
+    def visit(v, cur_id):
+        if isinstance(v, dict):
+            if v.get("class") in ("CommandLineTool", "Workflow"):
+                cur_id = rewrite_to_orig.get(v["id"], v["id"])
+            if "location" in v and not v["location"].startswith("keep:"):
+                v["location"] = merged_map[cur_id][v["location"]]
+            for l in v:
+                visit(v[l], cur_id)
+        if isinstance(v, list):
+            for l in v:
+                visit(l, cur_id)
+    visit(packed, None)
+    return packed
 
 def tag_git_version(packed):
     if tool.tool["id"].startswith("file://"):
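
`packed_workflow` now rewrites every non-Keep `location` in the packed document through `merged_map`, which is keyed by the original (pre-pack) tool ids; `rewrite_to_orig` inverts the id rewrites that `pack()` reports so those keys can be recovered. A hedged sketch of the data shapes involved (all ids, paths, and the Keep hash below are invented for illustration):

```python
# Hypothetical shapes, for orientation only.
rewrites = {   # filled in by pack(..., rewrite_out=rewrites): original -> packed id
    "file:///home/user/wf.cwl": "#main",
}
rewrite_to_orig = {v: k for k, v in rewrites.items()}  # packed -> original id

merged_map = {  # original tool id -> {original location: Keep location}
    "file:///home/user/wf.cwl": {
        "file:///home/user/blub.txt":
            "keep:99999999999999999999999999999998+13/blub.txt",
    },
}
# visit() tracks the enclosing tool's original id via rewrite_to_orig and
# swaps each non-"keep:" location for its entry in merged_map.
```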
@@ -229,16 +247,18 @@ def upload_job_order(arvrunner, name, tool, job_order):
 
     return job_order
 
-def upload_workflow_deps(arvrunner, tool, override_tools):
+def upload_workflow_deps(arvrunner, tool):
     # Ensure that Docker images needed by this workflow are available
 
     upload_docker(arvrunner, tool)
 
     document_loader = tool.doc_loader
 
+    merged_map = {}
+
     def upload_tool_deps(deptool):
         if "id" in deptool:
-            upload_dependencies(arvrunner,
+            pm = upload_dependencies(arvrunner,
                                 "%s dependencies" % (shortname(deptool["id"])),
                                 document_loader,
                                 deptool,
@@ -246,10 +266,15 @@ def upload_workflow_deps(arvrunner, tool, override_tools):
                                 False,
                                 include_primary=False)
             document_loader.idx[deptool["id"]] = deptool
-            override_tools[deptool["id"]] = json.dumps(deptool)
+            toolmap = {}
+            for k, v in pm.items():
+                toolmap[k] = v.resolved
+            merged_map[deptool["id"]] = toolmap
 
     tool.visit(upload_tool_deps)
 
+    return merged_map
+
 def arvados_jobs_image(arvrunner, img):
     """Determine if the right arvados/jobs image version is available.  If not, try to pull and upload it."""
 
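
`upload_workflow_deps` now returns the accumulated per-tool relocation map instead of filling in the caller-supplied `override_tools` dict it used to take. A hedged sketch of the intended wiring; the actual call sites live outside this file:

```python
# Assumed caller wiring, not shown in this diff.
merged_map = upload_workflow_deps(arvrunner, tool)
packed = packed_workflow(arvrunner, tool, merged_map)
```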
@@ -291,7 +316,8 @@ class Runner(object):
     def __init__(self, runner, tool, job_order, enable_reuse,
                  output_name, output_tags, submit_runner_ram=0,
                  name=None, on_error=None, submit_runner_image=None,
-                 intermediate_output_ttl=0):
+                 intermediate_output_ttl=0, merged_map=None, priority=None,
+                 secret_store=None):
         self.arvrunner = runner
         self.tool = tool
         self.job_order = job_order
@@ -311,6 +337,8 @@ class Runner(object):
         self.on_error = on_error
         self.jobs_image = submit_runner_image or "arvados/jobs:"+__version__
         self.intermediate_output_ttl = intermediate_output_ttl
+        self.priority = priority
+        self.secret_store = secret_store
 
         if submit_runner_ram:
             self.submit_runner_ram = submit_runner_ram
@@ -320,6 +348,8 @@ class Runner(object):
         if self.submit_runner_ram <= 0:
             raise Exception("Value of --submit-runner-ram must be greater than zero")
 
+        self.merged_map = merged_map or {}
+
     def update_pipeline_component(self, record):
         pass
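
With the widened constructor, a `Runner` can be handed the relocation map along with a container priority and a secret store. A construction sketch with placeholder values, assuming cwltool's `SecretStore` is what gets passed for `secret_store` and that in practice a `Runner` subclass is instantiated rather than the base class:

```python
from cwltool.secrets import SecretStore

runner = Runner(arvrunner, tool, job_order,
                enable_reuse=True,
                output_name=None,
                output_tags=None,
                submit_runner_ram=1024,      # MiB; 0 falls back to the default
                intermediate_output_ttl=0,
                merged_map=merged_map,       # from upload_workflow_deps()
                priority=500,                # containers API priority, placeholder
                secret_store=SecretStore())
```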