Fix CollectionCache to pass num_retries to CollectionReader
diff --git a/sdk/cwl/arvados_cwl/arvjob.py b/sdk/cwl/arvados_cwl/arvjob.py
index 7b318026d57a0da8ef3caa5a4b91f768e824431a..2731b2694422fcf8a986057266efe23354830c46 100644
--- a/sdk/cwl/arvados_cwl/arvjob.py
+++ b/sdk/cwl/arvados_cwl/arvjob.py
@@ -1,3 +1,7 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
 import logging
 import re
 import copy
@@ -9,17 +13,18 @@ from cwltool.errors import WorkflowException
 from cwltool.draft2tool import revmap_file, CommandLineTool
 from cwltool.load_tool import fetch_document
 from cwltool.builder import Builder
-from cwltool.pathmapper import adjustDirObjs
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
 
 from schema_salad.sourceline import SourceLine
 
 import ruamel.yaml as yaml
 
 import arvados.collection
+from arvados.errors import ApiError
 
 from .arvdocker import arv_docker_get_image
-from .runner import Runner, arvados_jobs_image, packed_workflow, trim_listing
-from .pathmapper import InitialWorkDirPathMapper
+from .runner import Runner, arvados_jobs_image, packed_workflow, upload_workflow_collection, trim_anonymous_location, remove_redundant_fields
+from .pathmapper import VwdPathMapper, trim_listing
 from .perf import Perf
 from . import done
 from ._version import __version__
@@ -27,7 +32,7 @@ from ._version import __version__
 logger = logging.getLogger('arvados.cwl-runner')
 metrics = logging.getLogger('arvados.cwl-runner.metrics')
 
-crunchrunner_re = re.compile(r"^\S+ \S+ \d+ \d+ stderr \S+ \S+ crunchrunner: \$\(task\.(tmpdir|outdir|keep)\)=(.*)")
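+# Match crunchrunner's task path lines regardless of the log line prefix.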
+crunchrunner_re = re.compile(r"^.*crunchrunner: \$\(task\.(tmpdir|outdir|keep)\)=(.*)$")
 
 crunchrunner_git_commit = 'a3f2cb186e437bfce0031b024b2157b73ed2717d'
 
@@ -51,8 +56,8 @@ class ArvadosJob(object):
                                                     keep_client=self.arvrunner.keep_client,
                                                     num_retries=self.arvrunner.num_retries)
                 script_parameters["task.vwd"] = {}
-                generatemapper = InitialWorkDirPathMapper([self.generatefiles], "", "",
-                                                          separateDirs=False)
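+                # Map the generated files and directories into the job's
+                # virtual working directory.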
+                generatemapper = VwdPathMapper([self.generatefiles], "", "",
+                                               separateDirs=False)
 
                 with Perf(metrics, "createfiles %s" % self.name):
                     for f, p in generatemapper.items():
@@ -60,8 +65,9 @@ class ArvadosJob(object):
                             with vwd.open(p.target, "w") as n:
                                 n.write(p.resolved.encode("utf-8"))
 
-                with Perf(metrics, "generatefiles.save_new %s" % self.name):
-                    vwd.save_new()
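+                # An empty Collection is falsy, so this skips saving a vwd
+                # with nothing staged in it.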
+                if vwd:
+                    with Perf(metrics, "generatefiles.save_new %s" % self.name):
+                        vwd.save_new()
 
                 for f, p in generatemapper.items():
                     if p.type == "File":
@@ -109,6 +115,7 @@ class ArvadosJob(object):
         if runtime_req:
             if "keep_cache" in runtime_req:
                 runtime_constraints["keep_cache_mb_per_task"] = runtime_req["keep_cache"]
+                runtime_constraints["min_ram_mb_per_node"] += runtime_req["keep_cache"]
             if "outputDirType" in runtime_req:
                 if runtime_req["outputDirType"] == "local_output_dir":
                     script_parameters["task.keepTmpOutput"] = False
@@ -121,6 +128,12 @@ class ArvadosJob(object):
         if not self.arvrunner.ignore_docker_for_reuse:
             filters.append(["docker_image_locator", "in docker", runtime_constraints["docker_image"]])
 
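+        # A tool can opt out of job reuse via the
+        # http://arvados.org/cwl#ReuseRequirement hint, even when reuse is
+        # enabled on the command line.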
+        enable_reuse = kwargs.get("enable_reuse", True)
+        if enable_reuse:
+            reuse_req, _ = get_feature(self, "http://arvados.org/cwl#ReuseRequirement")
+            if reuse_req:
+                enable_reuse = reuse_req["enableReuse"]
+
         try:
             with Perf(metrics, "create %s" % self.name):
                 response = self.arvrunner.api.jobs().create(
@@ -134,18 +147,35 @@ class ArvadosJob(object):
                         "runtime_constraints": runtime_constraints
                     },
                     filters=filters,
-                    find_or_create=kwargs.get("enable_reuse", True)
+                    find_or_create=enable_reuse
                 ).execute(num_retries=self.arvrunner.num_retries)
 
             self.arvrunner.processes[response["uuid"]] = self
 
             self.update_pipeline_component(response)
 
-            logger.info("%s %s is %s", self.arvrunner.label(self), response["uuid"], response["state"])
+            if response["state"] == "Complete":
+                logger.info("%s reused job %s", self.arvrunner.label(self), response["uuid"])
+                # Give read permission to the desired project on reused jobs
+                if response["owner_uuid"] != self.arvrunner.project_uuid:
+                    try:
+                        self.arvrunner.api.links().create(body={
+                            'link_class': 'permission',
+                            'name': 'can_read',
+                            'tail_uuid': self.arvrunner.project_uuid,
+                            'head_uuid': response["uuid"],
+                            }).execute(num_retries=self.arvrunner.num_retries)
+                    except ApiError as e:
+                        # The user might not have "manage" access on the job: log
+                        # a message and continue.
+                        logger.info("Creating read permission on job %s: %s",
+                                    response["uuid"],
+                                    e)
 
-            if response["state"] in ("Complete", "Failed", "Cancelled"):
                 with Perf(metrics, "done %s" % self.name):
                     self.done(response)
+            else:
+                logger.info("%s %s is %s", self.arvrunner.label(self), response["uuid"], response["state"])
         except Exception as e:
             logger.exception("%s error" % (self.arvrunner.label(self)))
             self.output_callback({}, "permanentFail")
@@ -154,17 +184,19 @@ class ArvadosJob(object):
         if self.arvrunner.pipeline:
             self.arvrunner.pipeline["components"][self.name] = {"job": record}
             with Perf(metrics, "update_pipeline_component %s" % self.name):
-                self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(uuid=self.arvrunner.pipeline["uuid"],
-                                                                                 body={
-                                                                                    "components": self.arvrunner.pipeline["components"]
-                                                                                 }).execute(num_retries=self.arvrunner.num_retries)
+                self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(
+                    uuid=self.arvrunner.pipeline["uuid"],
+                    body={
+                        "components": self.arvrunner.pipeline["components"]
+                    }).execute(num_retries=self.arvrunner.num_retries)
         if self.arvrunner.uuid:
             try:
                 job = self.arvrunner.api.jobs().get(uuid=self.arvrunner.uuid).execute()
                 if job:
                     components = job["components"]
                     components[self.name] = record["uuid"]
-                    self.arvrunner.api.jobs().update(uuid=self.arvrunner.uuid,
+                    self.arvrunner.api.jobs().update(
+                        uuid=self.arvrunner.uuid,
                         body={
                             "components": components
                         }).execute(num_retries=self.arvrunner.num_retries)
@@ -192,12 +224,13 @@ class ArvadosJob(object):
                                                                    keep_client=self.arvrunner.keep_client,
                                                                    num_retries=self.arvrunner.num_retries)
                         log = logc.open(logc.keys()[0])
-                        dirs = {}
-                        tmpdir = None
-                        outdir = None
-                        keepdir = None
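+                        # Default mount points; overridden below by any
+                        # crunchrunner path lines found in the log.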
+                        dirs = {
+                            "tmpdir": "/tmpdir",
+                            "outdir": "/outdir",
+                            "keep": "/keep"
+                        }
                         for l in log:
-                            # Determine the tmpdir, outdir and keepdir paths from
+                            # Determine the tmpdir, outdir and keep paths from
                             # the job run.  Unfortunately, we can't take the first
                             # values we find (which are expected to be near the
                             # top) and stop scanning because if the node fails and
@@ -236,30 +269,6 @@ class ArvadosJob(object):
 class RunnerJob(Runner):
     """Submit and manage a Crunch job that runs crunch_scripts/cwl-runner."""
 
-    def upload_workflow_collection(self, packed):
-        collection = arvados.collection.Collection(api_client=self.arvrunner.api,
-                                                   keep_client=self.arvrunner.keep_client,
-                                                   num_retries=self.arvrunner.num_retries)
-        with collection.open("workflow.cwl", "w") as f:
-            f.write(yaml.round_trip_dump(packed))
-
-        filters = [["portable_data_hash", "=", collection.portable_data_hash()],
-                   ["name", "like", self.name+"%"]]
-        if self.arvrunner.project_uuid:
-            filters.append(["owner_uuid", "=", self.arvrunner.project_uuid])
-        exists = self.arvrunner.api.collections().list(filters=filters).execute(num_retries=self.arvrunner.num_retries)
-
-        if exists["items"]:
-            logger.info("Using collection %s", exists["items"][0]["uuid"])
-        else:
-            collection.save_new(name=self.name,
-                                owner_uuid=self.arvrunner.project_uuid,
-                                ensure_unique_name=True,
-                                num_retries=self.arvrunner.num_retries)
-            logger.info("Uploaded to %s", collection.manifest_locator())
-
-        return collection.portable_data_hash()
-
     def arvados_job_spec(self, dry_run=False, pull_image=True, **kwargs):
         """Create an Arvados job specification for this workflow.
 
@@ -271,11 +280,13 @@ class RunnerJob(Runner):
         if self.tool.tool["id"].startswith("keep:"):
             self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
         else:
-            packed = packed_workflow(self.arvrunner, self.tool)
-            wf_pdh = self.upload_workflow_collection(packed)
+            packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
+            wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
             self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
 
         adjustDirObjs(self.job_order, trim_listing)
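+        # Strip anonymous "location" fields and redundant name fields from
+        # the stored job order.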
+        visit_class(self.job_order, ("File", "Directory"), trim_anonymous_location)
+        visit_class(self.job_order, ("File", "Directory"), remove_redundant_fields)
 
         if self.output_name:
             self.job_order["arv:output_name"] = self.output_name
@@ -288,6 +299,9 @@ class RunnerJob(Runner):
         if self.on_error:
             self.job_order["arv:on_error"] = self.on_error
 
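+        # Propagate the debug setting to the cwl-runner job.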
+        if kwargs.get("debug"):
+            self.job_order["arv:debug"] = True
+
         return {
             "script": "cwl-runner",
             "script_version": "master",
@@ -316,12 +330,20 @@ class RunnerJob(Runner):
 
         del job_spec["owner_uuid"]
         job_spec["job"] = job
+
+        instance_spec = {
+            "owner_uuid": self.arvrunner.project_uuid,
+            "name": self.name,
+            "components": {
+                "cwl-runner": job_spec,
+            },
+            "state": "RunningOnServer",
+        }
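+        # Recording disabled reuse in the instance properties lets a later
+        # re-run of this pipeline keep reuse off.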
+        if not self.enable_reuse:
+            instance_spec["properties"] = {"run_options": {"enable_job_reuse": False}}
+
         self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().create(
-            body={
-                "owner_uuid": self.arvrunner.project_uuid,
-                "name": self.name,
-                "components": {"cwl-runner": job_spec },
-                "state": "RunningOnServer"}).execute(num_retries=self.arvrunner.num_retries)
+            body=instance_spec).execute(num_retries=self.arvrunner.num_retries)
         logger.info("Created pipeline %s", self.arvrunner.pipeline["uuid"])
 
         if kwargs.get("wait") is False:
@@ -348,7 +370,7 @@ class RunnerTemplate(object):
     }
 
     def __init__(self, runner, tool, job_order, enable_reuse, uuid,
-                 submit_runner_ram=0, name=None):
+                 submit_runner_ram=0, name=None, merged_map=None):
         self.runner = runner
         self.tool = tool
         self.job = RunnerJob(
@@ -359,7 +381,8 @@ class RunnerTemplate(object):
             output_name=None,
             output_tags=None,
             submit_runner_ram=submit_runner_ram,
-            name=name)
+            name=name,
+            merged_map=merged_map)
         self.uuid = uuid
 
     def pipeline_component_spec(self):