Update tests related to make_output_collection
[arvados.git] sdk/cwl/arvados_cwl/arvcontainer.py
index c9170c51b732deee8f5a8bc383746c527ae5def7..d53ec5c7f8590430ee623701724bcc4757e8b04e 100644
@@ -28,7 +28,7 @@ import arvados.collection
 
 from .arvdocker import arv_docker_get_image
 from . import done
-from .runner import Runner, arvados_jobs_image, packed_workflow, trim_anonymous_location, remove_redundant_fields
+from .runner import Runner, arvados_jobs_image, packed_workflow, trim_anonymous_location, remove_redundant_fields, make_builder
 from .fsaccess import CollectionFetcher
 from .pathmapper import NoFollowPathMapper, trim_listing
 from .perf import Perf
@@ -57,7 +57,13 @@ class ArvadosContainer(JobBase):
     def update_pipeline_component(self, r):
         pass
 
-    def run(self, runtimeContext):
+    def _required_env(self):
+        env = {}
+        env["HOME"] = self.outdir
+        env["TMPDIR"] = self.tmpdir
+        return env
+
+    def run(self, toplevelRuntimeContext):
         # ArvadosCommandTool subclasses from cwltool.CommandLineTool,
         # which calls makeJobRunner() to get a new ArvadosContainer
         # object.  The fields that define execution such as
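
The first hunk factors the container's required environment into a `_required_env()` hook (presumably consumed by cwltool's job machinery when it assembles the final environment) and renames `run()`'s parameter to `toplevelRuntimeContext`, since the method immediately switches to `self.job_runtime`. A standalone sketch of the environment contract, with illustrative paths:

```python
# Standalone sketch (not part of the diff): the environment contract implied by
# the new ArvadosContainer._required_env(). Paths are illustrative only.
class FakeContainer:
    outdir = "/var/spool/cwl"   # assumed container output directory
    tmpdir = "/tmp"             # assumed container scratch directory

    def _required_env(self):
        env = {}
        env["HOME"] = self.outdir
        env["TMPDIR"] = self.tmpdir
        return env

assert FakeContainer()._required_env() == {"HOME": "/var/spool/cwl", "TMPDIR": "/tmp"}
```
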
@@ -67,15 +73,21 @@ class ArvadosContainer(JobBase):
 
         runtimeContext = self.job_runtime
 
-        container_request = {
-            "command": self.command_line,
-            "name": self.name,
-            "output_path": self.outdir,
-            "cwd": self.outdir,
-            "priority": runtimeContext.priority,
-            "state": "Committed",
-            "properties": {},
-        }
+        if runtimeContext.submit_request_uuid:
+            container_request = self.arvrunner.api.container_requests().get(
+                uuid=runtimeContext.submit_request_uuid
+            ).execute(num_retries=self.arvrunner.num_retries)
+        else:
+            container_request = {}
+
+        container_request["command"] = self.command_line
+        container_request["name"] = self.name
+        container_request["output_path"] = self.outdir
+        container_request["cwd"] = self.outdir
+        container_request["priority"] = runtimeContext.priority
+        container_request["state"] = "Committed"
+        container_request.setdefault("properties", {})
+
         runtime_constraints = {}
 
         if runtimeContext.project_uuid:
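
This hunk replaces the unconditional dict literal with an update-or-create pattern: when `submit_request_uuid` is set, the existing container request record is fetched and its run-specific fields are overwritten (note the `setdefault` that preserves any properties already on a reused record); otherwise an empty dict is filled in. A standalone sketch of the pattern with the API fetch stubbed out:

```python
def build_container_request(existing=None, *, command, name, outdir, priority):
    """Update-or-create sketch: start from an existing record if given,
    otherwise from scratch, then overwrite the fields this run controls."""
    container_request = dict(existing) if existing else {}
    container_request["command"] = command
    container_request["name"] = name
    container_request["output_path"] = outdir
    container_request["cwd"] = outdir
    container_request["priority"] = priority
    container_request["state"] = "Committed"
    # keep any properties already present on a reused request
    container_request.setdefault("properties", {})
    return container_request

req = build_container_request(None, command=["echo", "hello"], name="step1",
                              outdir="/var/spool/cwl", priority=500)
assert req["state"] == "Committed" and req["properties"] == {}
```
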
@@ -134,6 +146,8 @@ class ArvadosContainer(JobBase):
                     mounts[targetdir]["path"] = path
             prevdir = targetdir + "/"
 
+        intermediate_collection_info = arvados_cwl.util.get_intermediate_collection_info(self.name, runtimeContext.current_container, runtimeContext.intermediate_output_ttl)
+
         with Perf(metrics, "generatefiles %s" % self.name):
             if self.generatefiles["listing"]:
                 vwd = arvados.collection.Collection(api_client=self.arvrunner.api,
@@ -185,12 +199,11 @@ class ArvadosContainer(JobBase):
 
                 if not runtimeContext.current_container:
                     runtimeContext.current_container = arvados_cwl.util.get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
-                info = arvados_cwl.util.get_intermediate_collection_info(self.name, runtimeContext.current_container, runtimeContext.intermediate_output_ttl)
-                vwd.save_new(name=info["name"],
+                vwd.save_new(name=intermediate_collection_info["name"],
                              owner_uuid=runtimeContext.project_uuid,
                              ensure_unique_name=True,
-                             trash_at=info["trash_at"],
-                             properties=info["properties"])
+                             trash_at=intermediate_collection_info["trash_at"],
+                             properties=intermediate_collection_info["properties"])
 
                 prev = None
                 for f, p in sorteditems:
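
The intermediate-collection metadata is now computed once (as `intermediate_collection_info`, two hunks up) and reused when saving the staging collection via `vwd.save_new()`. Based solely on how the dict is consumed here, a sketch of the shape `get_intermediate_collection_info()` is expected to return; the naming scheme and TTL arithmetic are assumptions:

```python
from datetime import datetime, timedelta, timezone

def sketch_intermediate_collection_info(step_name, container_uuid, ttl_seconds):
    """Illustrative stand-in: returns the keys consumed by vwd.save_new() above."""
    trash_at = None
    if ttl_seconds:
        # assumed: expire the intermediate collection ttl_seconds from now
        trash_at = datetime.now(timezone.utc) + timedelta(seconds=ttl_seconds)
    return {
        "name": "Intermediate collection for step %s" % step_name,
        "trash_at": trash_at,
        "properties": {"type": "intermediate", "container": container_uuid},
    }
```
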
@@ -228,15 +241,15 @@ class ArvadosContainer(JobBase):
                                 "path": "%s/%s" % (self.outdir, self.stdout)}
 
         (docker_req, docker_is_req) = self.get_requirement("DockerRequirement")
-        if not docker_req:
-            docker_req = {"dockerImageId": "arvados/jobs:"+__version__}
 
         container_request["container_image"] = arv_docker_get_image(self.arvrunner.api,
                                                                     docker_req,
                                                                     runtimeContext.pull_image,
                                                                     runtimeContext.project_uuid,
                                                                     runtimeContext.force_docker_pull,
-                                                                    runtimeContext.tmp_outdir_prefix)
+                                                                    runtimeContext.tmp_outdir_prefix,
+                                                                    runtimeContext.match_local_docker,
+                                                                    runtimeContext.copy_deps)
 
         network_req, _ = self.get_requirement("NetworkAccess")
         if network_req:
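
Two things change around the image lookup: the fallback to a default `arvados/jobs:<version>` image is dropped, so `docker_req` may now be `None` when no DockerRequirement is present (presumably handled elsewhere), and `arv_docker_get_image` gains two trailing arguments. A data-only sketch of the options now threaded through from the runtime context (the glosses on the two new flags are assumptions, not documentation):

```python
# Sketch: runtime-context knobs forwarded to arv_docker_get_image(), in call order.
docker_image_options = {
    "pull_image": True,           # existing: pull the image if it is not in Keep yet
    "project_uuid": None,         # existing: project that should own the uploaded image
    "force_docker_pull": False,   # existing
    "tmp_outdir_prefix": "/tmp",  # existing
    "match_local_docker": False,  # new in this diff
    "copy_deps": False,           # new in this diff
}
```
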
@@ -273,11 +286,32 @@ class ArvadosContainer(JobBase):
         if self.output_ttl < 0:
             raise WorkflowException("Invalid value %d for output_ttl, cannot be less than zero" % container_request["output_ttl"])
 
-        storage_class_req, _ = self.get_requirement("http://arvados.org/cwl#OutputStorageClass")
-        if storage_class_req and storage_class_req.get("intermediateStorageClass"):
-            container_request["output_storage_classes"] = aslist(storage_class_req["intermediateStorageClass"])
+
+        if self.arvrunner.api._rootDesc["revision"] >= "20210628":
+            storage_class_req, _ = self.get_requirement("http://arvados.org/cwl#OutputStorageClass")
+            if storage_class_req and storage_class_req.get("intermediateStorageClass"):
+                container_request["output_storage_classes"] = aslist(storage_class_req["intermediateStorageClass"])
+            else:
+                container_request["output_storage_classes"] = runtimeContext.intermediate_storage_classes.strip().split(",")
+
+        cuda_req, _ = self.get_requirement("http://commonwl.org/cwltool#CUDARequirement")
+        if cuda_req:
+            runtime_constraints["cuda"] = {
+                "device_count": resources.get("cudaDeviceCount", 1),
+                "driver_version": cuda_req["cudaVersionMin"],
+                "hardware_capability": aslist(cuda_req["cudaComputeCapability"])[0]
+            }
+
+        if runtimeContext.enable_preemptible is False:
+            scheduling_parameters["preemptible"] = False
         else:
-            container_request["output_storage_classes"] = runtimeContext.intermediate_storage_classes.strip().split(",")
+            preemptible_req, _ = self.get_requirement("http://arvados.org/cwl#UsePreemptible")
+            if preemptible_req:
+                scheduling_parameters["preemptible"] = preemptible_req["usePreemptible"]
+            elif runtimeContext.enable_preemptible is True:
+                scheduling_parameters["preemptible"] = True
+            elif runtimeContext.enable_preemptible is None:
+                pass
 
         if self.timelimit is not None and self.timelimit > 0:
             scheduling_parameters["max_run_time"] = self.timelimit
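
This hunk gates `output_storage_classes` on API server revision 20210628, maps `cwltool:CUDARequirement` onto a `cuda` runtime constraint, and adds preemptible-instance selection. The subtle part is the precedence of the preemptible settings: `--disable-preemptible` wins outright, then the per-step `arv:UsePreemptible` hint, then `--enable-preemptible`, and otherwise the cluster default is left in place. A standalone sketch of that precedence:

```python
def resolve_preemptible(enable_preemptible, use_preemptible_hint):
    """enable_preemptible: True/False/None from the command line;
    use_preemptible_hint: value of arv:UsePreemptible, or None if absent.
    Returns True/False, or None to fall back to the cluster default."""
    if enable_preemptible is False:          # --disable-preemptible wins outright
        return False
    if use_preemptible_hint is not None:     # per-step hint comes next
        return use_preemptible_hint
    if enable_preemptible is True:           # then --enable-preemptible
        return True
    return None                              # nothing set: leave it to the cluster

assert resolve_preemptible(False, True) is False
assert resolve_preemptible(None, False) is False
assert resolve_preemptible(True, None) is True
assert resolve_preemptible(None, None) is None
```
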
@@ -303,6 +337,21 @@ class ArvadosContainer(JobBase):
                 enable_reuse = reuse_req["enableReuse"]
         container_request["use_existing"] = enable_reuse
 
+        properties_req, _ = self.get_requirement("http://arvados.org/cwl#ProcessProperties")
+        if properties_req:
+            for pr in properties_req["processProperties"]:
+                container_request["properties"][pr["propertyName"]] = self.builder.do_eval(pr["propertyValue"])
+
+        output_properties_req, _ = self.get_requirement("http://arvados.org/cwl#OutputCollectionProperties")
+        if output_properties_req:
+            if self.arvrunner.api._rootDesc["revision"] >= "20220510":
+                container_request["output_properties"] = {}
+                for pr in output_properties_req["outputProperties"]:
+                    container_request["output_properties"][pr["propertyName"]] = self.builder.do_eval(pr["propertyValue"])
+            else:
+                logger.warning("%s API server is too old to support setting properties on output collections.",
+                               self.arvrunner.label(self))
+
         if runtimeContext.runnerjob.startswith("arvwf:"):
             wfuuid = runtimeContext.runnerjob[6:runtimeContext.runnerjob.index("#")]
             wfrecord = self.arvrunner.api.workflows().get(uuid=wfuuid).execute(num_retries=self.arvrunner.num_retries)
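
Both new requirements, `arv:ProcessProperties` and `arv:OutputCollectionProperties`, are lists of `{propertyName, propertyValue}` pairs whose values may be CWL expressions, hence the `self.builder.do_eval()` calls; the output-collection variant is additionally gated on API server revision 20220510. A standalone sketch of the evaluation step, with a trivial stand-in evaluator:

```python
def eval_properties(process_properties, do_eval=lambda expr: expr):
    """Turn [{"propertyName": ..., "propertyValue": ...}, ...] into a dict,
    evaluating each value; do_eval stands in for builder.do_eval()."""
    out = {}
    for pr in process_properties:
        out[pr["propertyName"]] = do_eval(pr["propertyValue"])
    return out

assert eval_properties([{"propertyName": "category", "propertyValue": "qc"}]) == {"category": "qc"}
```
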
@@ -355,6 +404,10 @@ class ArvadosContainer(JobBase):
                     processStatus = "success"
                 else:
                     processStatus = "permanentFail"
+
+                if rcode == 137:
+                    logger.warning("%s Container may have been killed for using too much RAM.  Try resubmitting with a higher 'ramMin'.",
+                                 self.arvrunner.label(self))
             else:
                 processStatus = "permanentFail"
 
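
Exit code 137 encodes 128 + SIGKILL (9); on a memory-capped container that usually means the kernel's OOM killer fired, which is why the new warning suggests raising `ramMin`. A minimal standalone illustration of the check:

```python
import signal

def oom_suspected(rcode):
    """Exit codes above 128 are 128 + signal number; 137 is 128 + SIGKILL,
    which on a memory-limited container most often indicates an OOM kill."""
    return rcode == 128 + signal.SIGKILL   # 137

assert oom_suspected(137)
assert not oom_suspected(1)
```
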
@@ -424,7 +477,7 @@ class RunnerContainer(Runner):
             "cwd": "/var/spool/cwl",
             "priority": self.priority,
             "state": "Committed",
-            "container_image": arvados_jobs_image(self.arvrunner, self.jobs_image),
+            "container_image": arvados_jobs_image(self.arvrunner, self.jobs_image, runtimeContext),
             "mounts": {
                 "/var/lib/cwl/cwl.input.json": {
                     "kind": "json",
@@ -459,7 +512,7 @@ class RunnerContainer(Runner):
                 "portable_data_hash": "%s" % workflowcollection
             }
         else:
-            packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
+            packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map, runtimeContext)
             workflowpath = "/var/lib/cwl/workflow.json#main"
             container_req["mounts"]["/var/lib/cwl/workflow.json"] = {
                 "kind": "json",
@@ -468,6 +521,11 @@ class RunnerContainer(Runner):
             if self.embedded_tool.tool.get("id", "").startswith("arvwf:"):
                 container_req["properties"]["template_uuid"] = self.embedded_tool.tool["id"][6:33]
 
+        properties_req, _ = self.embedded_tool.get_requirement("http://arvados.org/cwl#ProcessProperties")
+        if properties_req:
+            builder = make_builder(self.job_order, self.embedded_tool.hints, self.embedded_tool.requirements, runtimeContext, self.embedded_tool.metadata)
+            for pr in properties_req["processProperties"]:
+                container_req["properties"][pr["propertyName"]] = builder.do_eval(pr["propertyValue"])
 
         # --local means execute the workflow instead of submitting a container request
         # --api=containers means use the containers API
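
The `[6:33]` slice in the context above strips the 6-character `arvwf:` prefix and keeps the 27-character Arvados workflow UUID that follows it; the new `arv:ProcessProperties` handling on the runner request mirrors the per-container evaluation sketched earlier, using a builder from `make_builder()`. A quick illustration of the slice with a made-up workflow id:

```python
# Hypothetical workflow id; Arvados UUIDs are 27 characters (5-5-15 with dashes)
# and the "arvwf:" prefix is 6, hence the [6:33] slice.
tool_id = "arvwf:zzzzz-7fd4e-012345678901234#main"
template_uuid = tool_id[6:33]
assert template_uuid == "zzzzz-7fd4e-012345678901234"
assert len(template_uuid) == 27
```
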
@@ -498,27 +556,33 @@ class RunnerContainer(Runner):
         if runtimeContext.debug:
             command.append("--debug")
 
-        if runtimeContext.storage_classes != "default":
+        if runtimeContext.storage_classes != "default" and runtimeContext.storage_classes:
             command.append("--storage-classes=" + runtimeContext.storage_classes)
 
-        if runtimeContext.intermediate_storage_classes != "default":
+        if runtimeContext.intermediate_storage_classes != "default" and runtimeContext.intermediate_storage_classes:
             command.append("--intermediate-storage-classes=" + runtimeContext.intermediate_storage_classes)
 
-        if self.on_error:
+        if runtimeContext.on_error:
             command.append("--on-error=" + self.on_error)
 
-        if self.intermediate_output_ttl:
-            command.append("--intermediate-output-ttl=%d" % self.intermediate_output_ttl)
+        if runtimeContext.intermediate_output_ttl:
+            command.append("--intermediate-output-ttl=%d" % runtimeContext.intermediate_output_ttl)
 
-        if self.arvrunner.trash_intermediate:
+        if runtimeContext.trash_intermediate:
             command.append("--trash-intermediate")
 
-        if self.arvrunner.project_uuid:
-            command.append("--project-uuid="+self.arvrunner.project_uuid)
+        if runtimeContext.project_uuid:
+            command.append("--project-uuid="+runtimeContext.project_uuid)
 
         if self.enable_dev:
             command.append("--enable-dev")
 
+        if runtimeContext.enable_preemptible is True:
+            command.append("--enable-preemptible")
+
+        if runtimeContext.enable_preemptible is False:
+            command.append("--disable-preemptible")
+
         command.extend([workflowpath, "/var/lib/cwl/cwl.input.json"])
 
         container_req["command"] = command
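
Taken together, the flag handling above now reads everything from `runtimeContext` rather than from `self`/`self.arvrunner`, emits the storage-class flags only when the value is set and not `"default"`, and adds the explicit `--enable-preemptible`/`--disable-preemptible` pair. A standalone sketch of the command assembled for an invented configuration (base flags beyond `--local` and `--api=containers` omitted):

```python
# Invented configuration, illustrating which flags get emitted.
storage_classes = "archival"
intermediate_storage_classes = "default"
intermediate_output_ttl = 3600
enable_preemptible = False

command = ["arvados-cwl-runner", "--local", "--api=containers"]

if storage_classes != "default" and storage_classes:
    command.append("--storage-classes=" + storage_classes)
if intermediate_storage_classes != "default" and intermediate_storage_classes:
    command.append("--intermediate-storage-classes=" + intermediate_storage_classes)
if intermediate_output_ttl:
    command.append("--intermediate-output-ttl=%d" % intermediate_output_ttl)
if enable_preemptible is True:
    command.append("--enable-preemptible")
if enable_preemptible is False:
    command.append("--disable-preemptible")

command.extend(["/var/lib/cwl/workflow.json#main", "/var/lib/cwl/cwl.input.json"])

assert command == ["arvados-cwl-runner", "--local", "--api=containers",
                   "--storage-classes=archival",
                   "--intermediate-output-ttl=3600",
                   "--disable-preemptible",
                   "/var/lib/cwl/workflow.json#main", "/var/lib/cwl/cwl.input.json"]
```
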
@@ -529,8 +593,8 @@ class RunnerContainer(Runner):
     def run(self, runtimeContext):
         runtimeContext.keepprefix = "keep:"
         job_spec = self.arvados_job_spec(runtimeContext)
-        if self.arvrunner.project_uuid:
-            job_spec["owner_uuid"] = self.arvrunner.project_uuid
+        if runtimeContext.project_uuid:
+            job_spec["owner_uuid"] = runtimeContext.project_uuid
 
         extra_submit_params = {}
         if runtimeContext.submit_runner_cluster: