Merge branch 'master' into 9307-cwl-use-tmp-output
diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index 6883c10e31420e39f49ff40b9a969c71f1937fe9..8eb8fe6fee50e0722ee7066171ff7b7bbc4a10c5 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -6,15 +6,17 @@ import logging
 from cwltool.pack import pack
 from cwltool.load_tool import fetch_document
 from cwltool.process import shortname
-from cwltool.workflow import Workflow
-from cwltool.pathmapper import adjustDirObjs
+from cwltool.workflow import Workflow, WorkflowException
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
 
 import ruamel.yaml as yaml
 
-from .runner import upload_docker, upload_dependencies
+from .runner import upload_docker, upload_dependencies, trim_listing
 from .arvtool import ArvadosCommandTool
+from .perf import Perf
 
 logger = logging.getLogger('arvados.cwl-runner')
+metrics = logging.getLogger('arvados.cwl-runner.metrics')
 
 def upload_workflow(arvRunner, tool, job_order, project_uuid, update_uuid):
     upload_docker(arvRunner, tool)
@@ -23,6 +25,8 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid, update_uuid):
 
     packed = pack(document_loader, workflowobj, uri, tool.metadata)
 
+    adjustDirObjs(job_order, trim_listing)
+
     main = [p for p in packed["$graph"] if p["id"] == "#main"][0]
     for inp in main["inputs"]:
         sn = shortname(inp["id"])
@@ -59,14 +63,43 @@ class ArvadosWorkflow(Workflow):
         req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
         if req:
             document_loader, workflowobj, uri = (self.doc_loader, self.doc_loader.fetch(self.tool["id"]), self.tool["id"])
-            workflowobj["requirements"] = self.requirements + workflowobj.get("requirements", [])
-            workflowobj["hints"] = self.hints + workflowobj.get("hints", [])
-            packed = pack(document_loader, workflowobj, uri, self.metadata)
 
-            def prune_directories(obj):
-                if obj["location"].startswith("keep:"):
-                    del obj["listing"]
-            adjustDirObjs(joborder, prune_directories)
+            with Perf(metrics, "subworkflow upload_deps"):
+                workflowobj["requirements"] = self.requirements + workflowobj.get("requirements", [])
+                workflowobj["hints"] = self.hints + workflowobj.get("hints", [])
+                packed = pack(document_loader, workflowobj, uri, self.metadata)
+
+                upload_dependencies(self.arvrunner,
+                                    kwargs.get("name", ""),
+                                    document_loader,
+                                    packed,
+                                    uri,
+                                    False)
+
+                upload_dependencies(self.arvrunner,
+                                    os.path.basename(joborder.get("id", "#")),
+                                    document_loader,
+                                    joborder,
+                                    joborder.get("id", "#"),
+                                    False)
+
+            with Perf(metrics, "subworkflow adjust"):
+                joborder_keepmount = copy.deepcopy(joborder)
+
+                def keepmount(obj):
+                    if obj["location"].startswith("keep:"):
+                        obj["location"] = "/keep/" + obj["location"][5:]
+                        if "listing" in obj:
+                            del obj["listing"]
+                    elif obj["location"].startswith("_:"):
+                        del obj["location"]
+                    else:
+                        raise WorkflowException("Location is not a keep reference or a literal: '%s'" % obj["location"])
+
+                adjustFileObjs(joborder_keepmount, keepmount)
+                adjustDirObjs(joborder_keepmount, keepmount)
+                adjustFileObjs(packed, keepmount)
+                adjustDirObjs(packed, keepmount)
 
             wf_runner = {
                 "class": "CommandLineTool",
@@ -75,19 +108,18 @@ class ArvadosWorkflow(Workflow):
                 "outputs": self.tool["outputs"],
                 "stdout": "cwl.output.json",
                 "requirements": workflowobj["requirements"]+[
-                    {"class": "InlineJavascriptRequirement"},
                     {
                     "class": "InitialWorkDirRequirement",
                     "listing": [{
-                            "entryname": "workflow.json",
-                            "entry": yaml.safe_dump(packed).replace('$(', '\$(').replace('${', '\${')
+                            "entryname": "workflow.cwl",
+                            "entry": yaml.safe_dump(packed).replace("\\", "\\\\").replace('$(', '\$(').replace('${', '\${')
                         }, {
-                            "entryname": "cwl.input.json",
-                            "entry": "$(JSON.stringify(inputs))"
+                            "entryname": "cwl.input.yml",
+                            "entry": yaml.safe_dump(joborder_keepmount).replace("\\", "\\\\").replace('$(', '\$(').replace('${', '\${')
                         }]
                 }],
                 "hints": workflowobj["hints"],
-                "arguments": ["--no-container", "--move-outputs", "workflow.json", "cwl.input.json"]
+                "arguments": ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl#main", "cwl.input.yml"]
             }
             kwargs["loader"] = self.doc_loader
             kwargs["avsc_names"] = self.doc_schema