11557: Merge branch 'master' into 11557-acr-output-col-perms
diff --git a/sdk/cwl/arvados_cwl/arvjob.py b/sdk/cwl/arvados_cwl/arvjob.py
index b7f72a97d69eccd5d872861c197b63b9ba9873a6..a620a207497b514fda743f6e7384c562369055d7 100644
@@ -9,7 +9,7 @@ from cwltool.errors import WorkflowException
 from cwltool.draft2tool import revmap_file, CommandLineTool
 from cwltool.load_tool import fetch_document
 from cwltool.builder import Builder
-from cwltool.pathmapper import adjustDirObjs
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
 
 from schema_salad.sourceline import SourceLine
 
@@ -18,8 +18,8 @@ import ruamel.yaml as yaml
 import arvados.collection
 
 from .arvdocker import arv_docker_get_image
-from .runner import Runner, arvados_jobs_image, packed_workflow, trim_listing, upload_workflow_collection
-from .pathmapper import InitialWorkDirPathMapper
+from .runner import Runner, arvados_jobs_image, packed_workflow, upload_workflow_collection, trim_anonymous_location
+from .pathmapper import VwdPathMapper, trim_listing
 from .perf import Perf
 from . import done
 from ._version import __version__
@@ -51,8 +51,8 @@ class ArvadosJob(object):
                                                     keep_client=self.arvrunner.keep_client,
                                                     num_retries=self.arvrunner.num_retries)
                 script_parameters["task.vwd"] = {}
-                generatemapper = InitialWorkDirPathMapper([self.generatefiles], "", "",
-                                                          separateDirs=False)
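+                # Map the InitialWorkDir listing into the virtual working directory (vwd).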
+                generatemapper = VwdPathMapper([self.generatefiles], "", "",
+                                               separateDirs=False)
 
                 with Perf(metrics, "createfiles %s" % self.name):
                     for f, p in generatemapper.items():
@@ -60,8 +60,9 @@ class ArvadosJob(object):
                             with vwd.open(p.target, "w") as n:
                                 n.write(p.resolved.encode("utf-8"))
 
-                with Perf(metrics, "generatefiles.save_new %s" % self.name):
-                    vwd.save_new()
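+                # Skip save_new() when nothing was written into the collection.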
+                if vwd:
+                    with Perf(metrics, "generatefiles.save_new %s" % self.name):
+                        vwd.save_new()
 
                 for f, p in generatemapper.items():
                     if p.type == "File":
@@ -109,6 +110,7 @@ class ArvadosJob(object):
         if runtime_req:
             if "keep_cache" in runtime_req:
                 runtime_constraints["keep_cache_mb_per_task"] = runtime_req["keep_cache"]
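+                # Request additional node RAM to cover the Keep cache on top of the job's own requirement.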
+                runtime_constraints["min_ram_mb_per_node"] += runtime_req["keep_cache"]
             if "outputDirType" in runtime_req:
                 if runtime_req["outputDirType"] == "local_output_dir":
                     script_parameters["task.keepTmpOutput"] = False
@@ -253,6 +255,8 @@ class RunnerJob(Runner):
             self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
 
         adjustDirObjs(self.job_order, trim_listing)
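+        # Strip anonymous locations from File and Directory literals in the job order before submission.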
+        adjustFileObjs(self.job_order, trim_anonymous_location)
+        adjustDirObjs(self.job_order, trim_anonymous_location)
 
         if self.output_name:
             self.job_order["arv:output_name"] = self.output_name
@@ -287,6 +291,30 @@ class RunnerJob(Runner):
             find_or_create=self.enable_reuse
         ).execute(num_retries=self.arvrunner.num_retries)
 
+        if self.enable_reuse:
+            # When reusing a job, copy its output and log collections to the destination project
+            reused_collections = [('Output', job.get('output', None)),
+                                  ('Log', job.get('log', None))]
+            for col_type, pdh in [(n, p) for n, p in reused_collections if p]:
+                c = arvados.collection.Collection(pdh,
+                                                  api_client=self.arvrunner.api,
+                                                  keep_client=self.arvrunner.keep_client,
+                                                  num_retries=self.arvrunner.num_retries)
+                c.save_new(name="{} of {}".format(col_type, self.name),
+                           owner_uuid=self.arvrunner.project_uuid,
+                           ensure_unique_name=True,
+                           num_retries=self.arvrunner.num_retries)
+                logger.info("Copied reused job's %s to collection %s",
+                            col_type.lower(),
+                            c.manifest_locator())
+            # Grant the destination project read permission on the reused job's components
+            for job_name, job_uuid in job.get('components', {}).items():
+                self.arvrunner.api.links().create(body={
+                    'link_class': 'can_read',
+                    'tail_uuid': self.arvrunner.project_uuid,
+                    'head_uuid': job_uuid,
+                    }).execute(num_retries=self.arvrunner.num_retries)
+
         for k,v in job_spec["script_parameters"].items():
             if v is False or v is None or isinstance(v, dict):
                 job_spec["script_parameters"][k] = {"value": v}