Merge branch '21535-multi-wf-delete'
diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index 3ad2c6419a2b9ce39c11b6471e3dac6555c317ef..dae68459bc3f6a00ea5c3bab3de03dbc64663a4e 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -2,18 +2,15 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from past.builtins import basestring
-from future.utils import viewitems
-
 import os
 import json
 import copy
 import logging
 import urllib
-from io import StringIO
 import sys
 import re
 
+from io import StringIO
 from typing import (MutableSequence, MutableMapping)
 
 from ruamel.yaml import YAML
@@ -38,6 +35,7 @@ import ruamel.yaml as yaml
 from .runner import (upload_dependencies, packed_workflow, upload_workflow_collection,
                      trim_anonymous_location, remove_redundant_fields, discover_secondary_files,
                      make_builder, arvados_jobs_image, FileUpdates)
+from .arvcontainer import RunnerContainer
 from .pathmapper import ArvPathMapper, trim_listing
 from .arvtool import ArvadosCommandTool, set_cluster_target
 from ._version import __version__
@@ -147,7 +145,7 @@ def make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info,
 
 
 def rel_ref(s, baseuri, urlexpander, merged_map, jobmapper):
-    if s.startswith("keep:"):
+    if s.startswith("keep:") or s.startswith("arvwf:"):
         return s
 
     uri = urlexpander(s, baseuri)
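The updated rel_ref now returns arvwf: references unchanged alongside keep: references, since both are already absolute Arvados identifiers that should not be rewritten relative to the workflow's base URI. A minimal standalone sketch of that early-return check (the reference strings below are invented for illustration):

    def is_absolute_arvados_ref(s):
        # Mirrors the guard at the top of rel_ref(): keep: and arvwf:
        # URIs pass through untouched instead of being relativized.
        return s.startswith("keep:") or s.startswith("arvwf:")

    for ref in ("keep:f225e6259bdd63bc7240599648dde9f1+97/wf.cwl",
                "arvwf:zzzzz-7fd4e-012345678901234#main",
                "subdir/step.cwl"):
        print(ref, "->", "passed through" if is_absolute_arvados_ref(ref) else "relativized")
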
@@ -587,7 +585,7 @@ class ArvadosWorkflowStep(WorkflowStep):
         runtimeContext = runtimeContext.copy()
         runtimeContext.toplevel = True  # Preserve behavior for #13365
 
-        builder = make_builder({shortname(k): v for k,v in viewitems(joborder)}, self.hints, self.requirements,
+        builder = make_builder({shortname(k): v for k, v in joborder.items()}, self.hints, self.requirements,
                                runtimeContext, self.metadata)
         runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
         return super(ArvadosWorkflowStep, self).job(joborder, output_callback, runtimeContext)
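For context, the comprehension above feeds make_builder a joborder keyed by short input names rather than fully qualified IDs; a small illustration using cwltool's shortname helper (the input IDs and values below are made up):

    from cwltool.process import shortname

    joborder = {
        "file:///work/wf.cwl#main/sample_id": "S1",
        "file:///work/wf.cwl#main/threads": 4,
    }
    # Should print something like: {'sample_id': 'S1', 'threads': 4}
    print({shortname(k): v for k, v in joborder.items()})
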
@@ -616,17 +614,8 @@ class ArvadosWorkflow(Workflow):
         super(ArvadosWorkflow, self).__init__(toolpath_object, self.loadingContext)
         self.cluster_target_req, _ = self.get_requirement("http://arvados.org/cwl#ClusterTarget")
 
-    def job(self, joborder, output_callback, runtimeContext):
-
-        builder = make_builder(joborder, self.hints, self.requirements, runtimeContext, self.metadata)
-        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
-
-        req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
-        if not req:
-            return super(ArvadosWorkflow, self).job(joborder, output_callback, runtimeContext)
-
-        # RunInSingleContainer is true
 
+    def runInSingleContainer(self, joborder, output_callback, runtimeContext, builder):
         with SourceLine(self.tool, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
             if "id" not in self.tool:
                 raise WorkflowException("%s object must have 'id'" % (self.tool["class"]))
@@ -663,7 +652,7 @@ class ArvadosWorkflow(Workflow):
                                 dyn = False
                                 for k in max_res_pars + sum_res_pars:
                                     if k in req:
-                                        if isinstance(req[k], basestring):
+                                        if isinstance(req[k], str):
                                             if item["id"] == "#main":
                                                 # only the top-level requirements/hints may contain expressions
                                                 self.dynamic_resource_req.append(req)
@@ -789,6 +778,51 @@ class ArvadosWorkflow(Workflow):
         })
         return ArvadosCommandTool(self.arvrunner, wf_runner, self.loadingContext).job(joborder_resolved, output_callback, runtimeContext)
 
+
+    def separateRunner(self, joborder, output_callback, runtimeContext, req, builder):
+
+        name = runtimeContext.name
+
+        rpn = req.get("runnerProcessName")
+        if rpn:
+            name = builder.do_eval(rpn)
+
+        return RunnerContainer(self.arvrunner,
+                               self,
+                               self.loadingContext,
+                               runtimeContext.enable_reuse,
+                               None,
+                               None,
+                               submit_runner_ram=runtimeContext.submit_runner_ram,
+                               name=name,
+                               on_error=runtimeContext.on_error,
+                               submit_runner_image=runtimeContext.submit_runner_image,
+                               intermediate_output_ttl=runtimeContext.intermediate_output_ttl,
+                               merged_map=None,
+                               priority=runtimeContext.priority,
+                               secret_store=self.arvrunner.secret_store,
+                               collection_cache_size=runtimeContext.collection_cache_size,
+                               collection_cache_is_default=self.arvrunner.should_estimate_cache_size,
+                               git_info=runtimeContext.git_info,
+                               reuse_runner=True).job(joborder, output_callback, runtimeContext)
+
+
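separateRunner defaults the runner name to runtimeContext.name and only overrides it when the SeparateRunner hint supplies a runnerProcessName expression, which is evaluated through the builder. A rough sketch of just that naming step, with a stand-in evaluator instead of a real CWL builder (the names and expression below are placeholders):

    def resolve_runner_name(hint, default_name, evaluate):
        # Hypothetical helper mirroring the start of separateRunner above:
        # keep the default unless the hint provides runnerProcessName.
        rpn = hint.get("runnerProcessName")
        if rpn:
            return evaluate(rpn)
        return default_name

    def fake_eval(expr):
        # Stand-in for builder.do_eval(), which would expand the CWL
        # expression against the actual job inputs.
        return "sample-S1-runner"

    hint = {"class": "http://arvados.org/cwl#SeparateRunner",
            "runnerProcessName": "$(inputs.sample_id)-runner"}
    print(resolve_runner_name(hint, "default-workflow-name", fake_eval))  # -> sample-S1-runner
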
+    def job(self, joborder, output_callback, runtimeContext):
+
+        builder = make_builder(joborder, self.hints, self.requirements, runtimeContext, self.metadata)
+        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
+
+        req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
+        if req:
+            return self.runInSingleContainer(joborder, output_callback, runtimeContext, builder)
+
+        req, _ = self.get_requirement("http://arvados.org/cwl#SeparateRunner")
+        if req:
+            return self.separateRunner(joborder, output_callback, runtimeContext, req, builder)
+
+        return super(ArvadosWorkflow, self).job(joborder, output_callback, runtimeContext)
+
+
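The rewritten job() dispatches on which Arvados extension is present: RunInSingleContainer is checked first and takes precedence, SeparateRunner is checked next, and otherwise the stock cwltool Workflow.job path runs. A simplified, standalone sketch of that precedence (the requirement URLs are copied from the code above; the return strings are placeholders for the real method calls):

    SINGLE = "http://arvados.org/cwl#RunInSingleContainer"
    SEPARATE = "http://arvados.org/cwl#SeparateRunner"

    def dispatch(requirements):
        # requirements: mapping of extension URL -> requirement body,
        # a simplified stand-in for self.get_requirement().
        if SINGLE in requirements:
            return "runInSingleContainer"
        if SEPARATE in requirements:
            return "separateRunner"
        return "cwltool Workflow.job"

    print(dispatch({SINGLE: {}, SEPARATE: {}}))                  # -> runInSingleContainer
    print(dispatch({SEPARATE: {"runnerProcessName": "run-1"}}))  # -> separateRunner
    print(dispatch({}))                                          # -> cwltool Workflow.job
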
     def make_workflow_step(self,
                            toolpath_object,      # type: Dict[Text, Any]
                            pos,                  # type: int